From webhook-mailer at python.org Fri May 1 00:30:18 2020 From: webhook-mailer at python.org (Noah Doersing) Date: Fri, 01 May 2020 04:30:18 -0000 Subject: [Python-checkins] Change 'exception happened' to 'exception occurred' in two places (#19767) Message-ID: https://github.com/python/cpython/commit/8bcfd31cc01e068bca78aa42a87c24aea6ebc6b1 commit: 8bcfd31cc01e068bca78aa42a87c24aea6ebc6b1 branch: master author: Noah Doersing committer: GitHub date: 2020-04-30T21:30:10-07:00 summary: Change 'exception happened' to 'exception occurred' in two places (#19767) files: M Doc/tutorial/errors.rst M Lib/socketserver.py diff --git a/Doc/tutorial/errors.rst b/Doc/tutorial/errors.rst index ab23df9f3ff9a..0ce96466e8c28 100644 --- a/Doc/tutorial/errors.rst +++ b/Doc/tutorial/errors.rst @@ -67,7 +67,7 @@ The rest of the line provides detail based on the type of exception and what caused it. The preceding part of the error message shows the context where the exception -happened, in the form of a stack traceback. In general it contains a stack +occurred, in the form of a stack traceback. In general it contains a stack traceback listing source lines; however, it will not display lines read from standard input. 
diff --git a/Lib/socketserver.py b/Lib/socketserver.py index 1ad028fa4d08c..57c1ae6e9e8be 100644 --- a/Lib/socketserver.py +++ b/Lib/socketserver.py @@ -374,7 +374,7 @@ def handle_error(self, request, client_address): """ print('-'*40, file=sys.stderr) - print('Exception happened during processing of request from', + print('Exception occurred during processing of request from', client_address, file=sys.stderr) import traceback traceback.print_exc() From webhook-mailer at python.org Fri May 1 05:33:52 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Fri, 01 May 2020 09:33:52 -0000 Subject: [Python-checkins] bpo-40453: Add PyConfig._isolated_subinterpreter (GH-19820) Message-ID: https://github.com/python/cpython/commit/252346acd937ddba4845331994b8ff4f90349625 commit: 252346acd937ddba4845331994b8ff4f90349625 branch: master author: Victor Stinner committer: GitHub date: 2020-05-01T11:33:44+02:00 summary: bpo-40453: Add PyConfig._isolated_subinterpreter (GH-19820) An isolated subinterpreter cannot spawn threads, spawn a child process or call os.fork(). * Add private _Py_NewInterpreter(isolated_subinterpreter) function. * Add isolated=True keyword-only parameter to _xxsubinterpreters.create(). * Allow again os.fork() in "non-isolated" subinterpreters. 
files: A Misc/NEWS.d/next/Library/2020-04-30-22-04-58.bpo-40453.ggz7sl.rst M Doc/c-api/init_config.rst M Include/cpython/initconfig.h M Include/cpython/pylifecycle.h M Lib/test/test__xxsubinterpreters.py M Lib/test/test_embed.py M Modules/_posixsubprocess.c M Modules/_threadmodule.c M Modules/_winapi.c M Modules/_xxsubinterpretersmodule.c M Modules/posixmodule.c M Programs/_testembed.c M Python/initconfig.c M Python/pylifecycle.c diff --git a/Doc/c-api/init_config.rst b/Doc/c-api/init_config.rst index 49507c8bad3ed..fc82c3eb59024 100644 --- a/Doc/c-api/init_config.rst +++ b/Doc/c-api/init_config.rst @@ -1004,6 +1004,8 @@ Private provisional API: * :c:member:`PyConfig._init_main`: if set to 0, :c:func:`Py_InitializeFromConfig` stops at the "Core" initialization phase. +* :c:member:`PyConfig._isolated_interpreter`: if non-zero, + disallow threads, subprocesses and fork. .. c:function:: PyStatus _Py_InitializeMain(void) diff --git a/Include/cpython/initconfig.h b/Include/cpython/initconfig.h index 8326c235702bd..df93a5539d48b 100644 --- a/Include/cpython/initconfig.h +++ b/Include/cpython/initconfig.h @@ -409,6 +409,10 @@ typedef struct { /* If equal to 0, stop Python initialization before the "main" phase */ int _init_main; + + /* If non-zero, disallow threads, subprocesses, and fork. + Default: 0. 
*/ + int _isolated_interpreter; } PyConfig; PyAPI_FUNC(void) PyConfig_InitPythonConfig(PyConfig *config); diff --git a/Include/cpython/pylifecycle.h b/Include/cpython/pylifecycle.h index a01e9c94f12d7..eb523b82e182d 100644 --- a/Include/cpython/pylifecycle.h +++ b/Include/cpython/pylifecycle.h @@ -65,6 +65,8 @@ PyAPI_FUNC(int) _Py_CoerceLegacyLocale(int warn); PyAPI_FUNC(int) _Py_LegacyLocaleDetected(int warn); PyAPI_FUNC(char *) _Py_SetLocaleFromEnv(int category); +PyAPI_FUNC(PyThreadState *) _Py_NewInterpreter(int isolated_subinterpreter); + #ifdef __cplusplus } #endif diff --git a/Lib/test/test__xxsubinterpreters.py b/Lib/test/test__xxsubinterpreters.py index 80eff19152f15..e17bfde2c2f75 100644 --- a/Lib/test/test__xxsubinterpreters.py +++ b/Lib/test/test__xxsubinterpreters.py @@ -794,6 +794,7 @@ def f(): self.assertEqual(out, 'it worked!') def test_create_thread(self): + subinterp = interpreters.create(isolated=False) script, file = _captured_script(""" import threading def f(): @@ -804,7 +805,7 @@ def f(): t.join() """) with file: - interpreters.run_string(self.id, script) + interpreters.run_string(subinterp, script) out = file.read() self.assertEqual(out, 'it worked!') diff --git a/Lib/test/test_embed.py b/Lib/test/test_embed.py index 0bdfae1b7e387..3d60b2f330c62 100644 --- a/Lib/test/test_embed.py +++ b/Lib/test/test_embed.py @@ -406,6 +406,7 @@ class InitConfigTests(EmbeddingTestsMixin, unittest.TestCase): 'check_hash_pycs_mode': 'default', 'pathconfig_warnings': 1, '_init_main': 1, + '_isolated_interpreter': 0, } if MS_WINDOWS: CONFIG_COMPAT.update({ @@ -766,6 +767,8 @@ def test_init_from_config(self): 'check_hash_pycs_mode': 'always', 'pathconfig_warnings': 0, + + '_isolated_interpreter': 1, } self.check_all_configs("test_init_from_config", config, preconfig, api=API_COMPAT) diff --git a/Misc/NEWS.d/next/Library/2020-04-30-22-04-58.bpo-40453.ggz7sl.rst b/Misc/NEWS.d/next/Library/2020-04-30-22-04-58.bpo-40453.ggz7sl.rst new file mode 100644 index 
0000000000000..f20c666d3e27f --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-04-30-22-04-58.bpo-40453.ggz7sl.rst @@ -0,0 +1,3 @@ +Add ``isolated=True`` keyword-only parameter to +``_xxsubinterpreters.create()``. An isolated subinterpreter cannot spawn +threads, spawn a child process or call ``os.fork()``. diff --git a/Modules/_posixsubprocess.c b/Modules/_posixsubprocess.c index 60dd78d92a4f5..add2962189b1c 100644 --- a/Modules/_posixsubprocess.c +++ b/Modules/_posixsubprocess.c @@ -663,6 +663,14 @@ subprocess_fork_exec(PyObject* self, PyObject *args) return NULL; } + PyInterpreterState *interp = PyInterpreterState_Get(); + const PyConfig *config = _PyInterpreterState_GetConfig(interp); + if (config->_isolated_interpreter) { + PyErr_SetString(PyExc_RuntimeError, + "subprocess not supported for isolated subinterpreters"); + return NULL; + } + /* We need to call gc.disable() when we'll be calling preexec_fn */ if (preexec_fn != Py_None) { PyObject *result; diff --git a/Modules/_threadmodule.c b/Modules/_threadmodule.c index b3d90b22c5a66..77baba4847897 100644 --- a/Modules/_threadmodule.c +++ b/Modules/_threadmodule.c @@ -1085,6 +1085,14 @@ thread_PyThread_start_new_thread(PyObject *self, PyObject *fargs) "optional 3rd arg must be a dictionary"); return NULL; } + + PyInterpreterState *interp = _PyInterpreterState_GET(); + if (interp->config._isolated_interpreter) { + PyErr_SetString(PyExc_RuntimeError, + "thread is not supported for isolated subinterpreters"); + return NULL; + } + boot = PyMem_NEW(struct bootstate, 1); if (boot == NULL) return PyErr_NoMemory(); diff --git a/Modules/_winapi.c b/Modules/_winapi.c index 1b28adb0b3983..e1672c478522e 100644 --- a/Modules/_winapi.c +++ b/Modules/_winapi.c @@ -1080,6 +1080,14 @@ _winapi_CreateProcess_impl(PyObject *module, return NULL; } + PyInterpreterState *interp = PyInterpreterState_Get(); + const PyConfig *config = _PyInterpreterState_GetConfig(interp); + if (config->_isolated_interpreter) { + 
PyErr_SetString(PyExc_RuntimeError, + "subprocess not supported for isolated subinterpreters"); + return NULL; + } + ZeroMemory(&si, sizeof(si)); si.StartupInfo.cb = sizeof(si); diff --git a/Modules/_xxsubinterpretersmodule.c b/Modules/_xxsubinterpretersmodule.c index 15e80559ec6f6..de11c090870f9 100644 --- a/Modules/_xxsubinterpretersmodule.c +++ b/Modules/_xxsubinterpretersmodule.c @@ -1999,16 +1999,20 @@ _global_channels(void) { } static PyObject * -interp_create(PyObject *self, PyObject *args) +interp_create(PyObject *self, PyObject *args, PyObject *kwds) { - if (!PyArg_UnpackTuple(args, "create", 0, 0)) { + + static char *kwlist[] = {"isolated", NULL}; + int isolated = 1; + if (!PyArg_ParseTupleAndKeywords(args, kwds, "|$i:create", kwlist, + &isolated)) { return NULL; } // Create and initialize the new interpreter. PyThreadState *save_tstate = PyThreadState_Swap(NULL); // XXX Possible GILState issues? - PyThreadState *tstate = Py_NewInterpreter(); + PyThreadState *tstate = _Py_NewInterpreter(isolated); PyThreadState_Swap(save_tstate); if (tstate == NULL) { /* Since no new thread state was created, there is no exception to @@ -2547,8 +2551,8 @@ channel__channel_id(PyObject *self, PyObject *args, PyObject *kwds) } static PyMethodDef module_functions[] = { - {"create", (PyCFunction)interp_create, - METH_VARARGS, create_doc}, + {"create", (PyCFunction)(void(*)(void))interp_create, + METH_VARARGS | METH_KEYWORDS, create_doc}, {"destroy", (PyCFunction)(void(*)(void))interp_destroy, METH_VARARGS | METH_KEYWORDS, destroy_doc}, {"list_all", interp_list_all, diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index 3d3f6ac969926..0163b0757aefa 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -6243,9 +6243,10 @@ os_fork_impl(PyObject *module) /*[clinic end generated code: output=3626c81f98985d49 input=13c956413110eeaa]*/ { pid_t pid; - - if (_PyInterpreterState_GET() != PyInterpreterState_Main()) { - PyErr_SetString(PyExc_RuntimeError, "fork not 
supported for subinterpreters"); + PyInterpreterState *interp = _PyInterpreterState_GET(); + if (interp->config._isolated_interpreter) { + PyErr_SetString(PyExc_RuntimeError, + "fork not supported for isolated subinterpreters"); return NULL; } if (PySys_Audit("os.fork", NULL) < 0) { diff --git a/Programs/_testembed.c b/Programs/_testembed.c index 2cf0d71b470bf..5c83678f650d0 100644 --- a/Programs/_testembed.c +++ b/Programs/_testembed.c @@ -603,6 +603,8 @@ static int test_init_from_config(void) Py_FrozenFlag = 0; config.pathconfig_warnings = 0; + config._isolated_interpreter = 1; + init_from_config_clear(&config); dump_config(); diff --git a/Python/initconfig.c b/Python/initconfig.c index 58cca562f336d..185935c05fb28 100644 --- a/Python/initconfig.c +++ b/Python/initconfig.c @@ -632,6 +632,7 @@ _PyConfig_InitCompatConfig(PyConfig *config) config->check_hash_pycs_mode = NULL; config->pathconfig_warnings = -1; config->_init_main = 1; + config->_isolated_interpreter = 0; #ifdef MS_WINDOWS config->legacy_windows_stdio = -1; #endif @@ -850,6 +851,7 @@ _PyConfig_Copy(PyConfig *config, const PyConfig *config2) COPY_WSTR_ATTR(check_hash_pycs_mode); COPY_ATTR(pathconfig_warnings); COPY_ATTR(_init_main); + COPY_ATTR(_isolated_interpreter); #undef COPY_ATTR #undef COPY_WSTR_ATTR @@ -949,6 +951,7 @@ config_as_dict(const PyConfig *config) SET_ITEM_WSTR(check_hash_pycs_mode); SET_ITEM_INT(pathconfig_warnings); SET_ITEM_INT(_init_main); + SET_ITEM_INT(_isolated_interpreter); return dict; diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index 7909cdbf5b772..5726a559cfcb7 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -1526,7 +1526,7 @@ Py_Finalize(void) */ static PyStatus -new_interpreter(PyThreadState **tstate_p) +new_interpreter(PyThreadState **tstate_p, int isolated_subinterpreter) { PyStatus status; @@ -1573,6 +1573,7 @@ new_interpreter(PyThreadState **tstate_p) if (_PyStatus_EXCEPTION(status)) { goto error; } + interp->config._isolated_interpreter = 
isolated_subinterpreter; status = pycore_interp_init(tstate); if (_PyStatus_EXCEPTION(status)) { @@ -1606,10 +1607,10 @@ new_interpreter(PyThreadState **tstate_p) } PyThreadState * -Py_NewInterpreter(void) +_Py_NewInterpreter(int isolated_subinterpreter) { PyThreadState *tstate = NULL; - PyStatus status = new_interpreter(&tstate); + PyStatus status = new_interpreter(&tstate, isolated_subinterpreter); if (_PyStatus_EXCEPTION(status)) { Py_ExitStatusException(status); } @@ -1617,6 +1618,12 @@ Py_NewInterpreter(void) } +PyThreadState * +Py_NewInterpreter(void) +{ + return _Py_NewInterpreter(0); +} + /* Delete an interpreter and its last thread. This requires that the given thread state is current, that the thread has no remaining frames, and that it is its interpreter's only remaining thread. From webhook-mailer at python.org Fri May 1 07:32:31 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Fri, 01 May 2020 11:32:31 -0000 Subject: [Python-checkins] bpo-40334: Simplify type handling in the PEG c_generator (GH-19818) Message-ID: https://github.com/python/cpython/commit/b796b3fb48283412d3caf52323c69690e5818d3d commit: b796b3fb48283412d3caf52323c69690e5818d3d branch: master author: Pablo Galindo committer: GitHub date: 2020-05-01T12:32:26+01:00 summary: bpo-40334: Simplify type handling in the PEG c_generator (GH-19818) files: M Parser/pegen/parse.c M Tools/peg_generator/pegen/c_generator.py M Tools/peg_generator/pegen/parser_generator.py diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index 9c941ca1ee2ec..2be5e384ae532 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -676,7 +676,7 @@ file_rule(Parser *p) int mark = p->mark; { // statements? 
$ void *a; - void *endmarker_var; + Token * endmarker_var; if ( (a = statements_rule(p), 1) && @@ -738,7 +738,7 @@ eval_rule(Parser *p) { // expressions NEWLINE* $ asdl_seq * _loop0_1_var; expr_ty a; - void *endmarker_var; + Token * endmarker_var; if ( (a = expressions_rule(p)) && @@ -774,10 +774,10 @@ func_type_rule(Parser *p) asdl_seq * _loop0_2_var; void *a; expr_ty b; - void *endmarker_var; - void *literal; - void *literal_1; - void *literal_2; + Token * endmarker_var; + Token * literal; + Token * literal_1; + Token * literal_2; if ( (literal = _PyPegen_expect_token(p, 7)) && @@ -850,10 +850,10 @@ type_expressions_rule(Parser *p) asdl_seq * a; expr_ty b; expr_ty c; - void *literal; - void *literal_1; - void *literal_2; - void *literal_3; + Token * literal; + Token * literal_1; + Token * literal_2; + Token * literal_3; if ( (a = _gather_3_rule(p)) && @@ -882,8 +882,8 @@ type_expressions_rule(Parser *p) { // ','.expression+ ',' '*' expression asdl_seq * a; expr_ty b; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (a = _gather_5_rule(p)) && @@ -906,8 +906,8 @@ type_expressions_rule(Parser *p) { // ','.expression+ ',' '**' expression asdl_seq * a; expr_ty b; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (a = _gather_7_rule(p)) && @@ -1031,7 +1031,7 @@ statement_newline_rule(Parser *p) UNUSED(start_col_offset); // Only used by EXTRA macro { // compound_stmt NEWLINE stmt_ty a; - void *newline_var; + Token * newline_var; if ( (a = compound_stmt_rule(p)) && @@ -1059,7 +1059,7 @@ statement_newline_rule(Parser *p) p->mark = mark; } { // NEWLINE - void *newline_var; + Token * newline_var; if ( (newline_var = _PyPegen_expect_token(p, NEWLINE)) ) @@ -1082,7 +1082,7 @@ statement_newline_rule(Parser *p) p->mark = mark; } { // $ - void *endmarker_var; + Token * endmarker_var; if ( (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) ) @@ -1112,7 +1112,7 @@ simple_stmt_rule(Parser *p) int mark = p->mark; { // 
small_stmt !';' NEWLINE stmt_ty a; - void *newline_var; + Token * newline_var; if ( (a = small_stmt_rule(p)) && @@ -1132,7 +1132,7 @@ simple_stmt_rule(Parser *p) } { // ';'.small_stmt+ ';'? NEWLINE asdl_seq * a; - void *newline_var; + Token * newline_var; void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( @@ -1263,7 +1263,7 @@ small_stmt_rule(Parser *p) p->mark = mark; } { // 'pass' - void *keyword; + Token * keyword; if ( (keyword = _PyPegen_expect_token(p, 502)) ) @@ -1325,7 +1325,7 @@ small_stmt_rule(Parser *p) p->mark = mark; } { // 'break' - void *keyword; + Token * keyword; if ( (keyword = _PyPegen_expect_token(p, 506)) ) @@ -1348,7 +1348,7 @@ small_stmt_rule(Parser *p) p->mark = mark; } { // 'continue' - void *keyword; + Token * keyword; if ( (keyword = _PyPegen_expect_token(p, 507)) ) @@ -1540,7 +1540,7 @@ assignment_rule(Parser *p) expr_ty a; expr_ty b; void *c; - void *literal; + Token * literal; if ( (a = _PyPegen_name_token(p)) && @@ -1572,7 +1572,7 @@ assignment_rule(Parser *p) void *a; expr_ty b; void *c; - void *literal; + Token * literal; if ( (a = _tmp_20_rule(p)) && @@ -1697,7 +1697,7 @@ augassign_rule(Parser *p) AugOperator* res = NULL; int mark = p->mark; { // '+=' - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 36)) ) @@ -1712,7 +1712,7 @@ augassign_rule(Parser *p) p->mark = mark; } { // '-=' - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 37)) ) @@ -1727,7 +1727,7 @@ augassign_rule(Parser *p) p->mark = mark; } { // '*=' - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 38)) ) @@ -1742,7 +1742,7 @@ augassign_rule(Parser *p) p->mark = mark; } { // '@=' - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 50)) ) @@ -1757,7 +1757,7 @@ augassign_rule(Parser *p) p->mark = mark; } { // '/=' - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 39)) ) @@ -1772,7 +1772,7 @@ augassign_rule(Parser *p) p->mark 
= mark; } { // '%=' - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 40)) ) @@ -1787,7 +1787,7 @@ augassign_rule(Parser *p) p->mark = mark; } { // '&=' - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 41)) ) @@ -1802,7 +1802,7 @@ augassign_rule(Parser *p) p->mark = mark; } { // '|=' - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 42)) ) @@ -1817,7 +1817,7 @@ augassign_rule(Parser *p) p->mark = mark; } { // '^=' - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 43)) ) @@ -1832,7 +1832,7 @@ augassign_rule(Parser *p) p->mark = mark; } { // '<<=' - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 44)) ) @@ -1847,7 +1847,7 @@ augassign_rule(Parser *p) p->mark = mark; } { // '>>=' - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 45)) ) @@ -1862,7 +1862,7 @@ augassign_rule(Parser *p) p->mark = mark; } { // '**=' - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 46)) ) @@ -1877,7 +1877,7 @@ augassign_rule(Parser *p) p->mark = mark; } { // '//=' - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 48)) ) @@ -1915,7 +1915,7 @@ global_stmt_rule(Parser *p) UNUSED(start_col_offset); // Only used by EXTRA macro { // 'global' ','.NAME+ asdl_seq * a; - void *keyword; + Token * keyword; if ( (keyword = _PyPegen_expect_token(p, 508)) && @@ -1963,7 +1963,7 @@ nonlocal_stmt_rule(Parser *p) UNUSED(start_col_offset); // Only used by EXTRA macro { // 'nonlocal' ','.NAME+ asdl_seq * a; - void *keyword; + Token * keyword; if ( (keyword = _PyPegen_expect_token(p, 509)) && @@ -2057,7 +2057,7 @@ assert_stmt_rule(Parser *p) { // 'assert' expression [',' expression] expr_ty a; void *b; - void *keyword; + Token * keyword; if ( (keyword = _PyPegen_expect_token(p, 505)) && @@ -2107,7 +2107,7 @@ del_stmt_rule(Parser *p) UNUSED(start_col_offset); // Only used by EXTRA macro { // 'del' 
del_targets asdl_seq* a; - void *keyword; + Token * keyword; if ( (keyword = _PyPegen_expect_token(p, 503)) && @@ -2191,7 +2191,7 @@ import_name_rule(Parser *p) UNUSED(start_col_offset); // Only used by EXTRA macro { // 'import' dotted_as_names asdl_seq* a; - void *keyword; + Token * keyword; if ( (keyword = _PyPegen_expect_token(p, 513)) && @@ -2243,8 +2243,8 @@ import_from_rule(Parser *p) asdl_seq * a; expr_ty b; asdl_seq* c; - void *keyword; - void *keyword_1; + Token * keyword; + Token * keyword_1; if ( (keyword = _PyPegen_expect_token(p, 514)) && @@ -2277,8 +2277,8 @@ import_from_rule(Parser *p) { // 'from' (('.' | '...'))+ 'import' import_from_targets asdl_seq * a; asdl_seq* b; - void *keyword; - void *keyword_1; + Token * keyword; + Token * keyword_1; if ( (keyword = _PyPegen_expect_token(p, 514)) && @@ -2322,8 +2322,8 @@ import_from_targets_rule(Parser *p) int mark = p->mark; { // '(' import_from_as_names ','? ')' asdl_seq* a; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( @@ -2357,7 +2357,7 @@ import_from_targets_rule(Parser *p) p->mark = mark; } { // '*' - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 16)) ) @@ -2535,7 +2535,7 @@ dotted_name_raw(Parser *p) { // dotted_name '.' 
NAME expr_ty a; expr_ty b; - void *literal; + Token * literal; if ( (a = dotted_name_rule(p)) && @@ -2592,8 +2592,8 @@ if_stmt_rule(Parser *p) expr_ty a; asdl_seq* b; stmt_ty c; - void *keyword; - void *literal; + Token * keyword; + Token * literal; if ( (keyword = _PyPegen_expect_token(p, 510)) && @@ -2627,8 +2627,8 @@ if_stmt_rule(Parser *p) expr_ty a; asdl_seq* b; void *c; - void *keyword; - void *literal; + Token * keyword; + Token * literal; if ( (keyword = _PyPegen_expect_token(p, 510)) && @@ -2686,8 +2686,8 @@ elif_stmt_rule(Parser *p) expr_ty a; asdl_seq* b; stmt_ty c; - void *keyword; - void *literal; + Token * keyword; + Token * literal; if ( (keyword = _PyPegen_expect_token(p, 515)) && @@ -2721,8 +2721,8 @@ elif_stmt_rule(Parser *p) expr_ty a; asdl_seq* b; void *c; - void *keyword; - void *literal; + Token * keyword; + Token * literal; if ( (keyword = _PyPegen_expect_token(p, 515)) && @@ -2768,8 +2768,8 @@ else_block_rule(Parser *p) int mark = p->mark; { // 'else' ':' block asdl_seq* b; - void *keyword; - void *literal; + Token * keyword; + Token * literal; if ( (keyword = _PyPegen_expect_token(p, 516)) && @@ -2813,8 +2813,8 @@ while_stmt_rule(Parser *p) expr_ty a; asdl_seq* b; void *c; - void *keyword; - void *literal; + Token * keyword; + Token * literal; if ( (keyword = _PyPegen_expect_token(p, 512)) && @@ -2872,9 +2872,9 @@ for_stmt_rule(Parser *p) asdl_seq* b; void *el; expr_ty ex; - void *keyword; - void *keyword_1; - void *literal; + Token * keyword; + Token * keyword_1; + Token * literal; expr_ty t; void *tc; if ( @@ -2913,13 +2913,13 @@ for_stmt_rule(Parser *p) p->mark = mark; } { // ASYNC 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block? 
- void *async_var; + Token * async_var; asdl_seq* b; void *el; expr_ty ex; - void *keyword; - void *keyword_1; - void *literal; + Token * keyword; + Token * keyword_1; + Token * literal; expr_ty t; void *tc; if ( @@ -2988,10 +2988,10 @@ with_stmt_rule(Parser *p) { // 'with' '(' ','.with_item+ ')' ':' block asdl_seq * a; asdl_seq* b; - void *keyword; - void *literal; - void *literal_1; - void *literal_2; + Token * keyword; + Token * literal; + Token * literal_1; + Token * literal_2; if ( (keyword = _PyPegen_expect_token(p, 519)) && @@ -3026,8 +3026,8 @@ with_stmt_rule(Parser *p) { // 'with' ','.with_item+ ':' TYPE_COMMENT? block asdl_seq * a; asdl_seq* b; - void *keyword; - void *literal; + Token * keyword; + Token * literal; void *tc; if ( (keyword = _PyPegen_expect_token(p, 519)) @@ -3060,12 +3060,12 @@ with_stmt_rule(Parser *p) } { // ASYNC 'with' '(' ','.with_item+ ')' ':' block asdl_seq * a; - void *async_var; + Token * async_var; asdl_seq* b; - void *keyword; - void *literal; - void *literal_1; - void *literal_2; + Token * keyword; + Token * literal; + Token * literal_1; + Token * literal_2; if ( (async_var = _PyPegen_expect_token(p, ASYNC)) && @@ -3101,10 +3101,10 @@ with_stmt_rule(Parser *p) } { // ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? 
block asdl_seq * a; - void *async_var; + Token * async_var; asdl_seq* b; - void *keyword; - void *literal; + Token * keyword; + Token * literal; void *tc; if ( (async_var = _PyPegen_expect_token(p, ASYNC)) @@ -3196,8 +3196,8 @@ try_stmt_rule(Parser *p) { // 'try' ':' block finally_block asdl_seq* b; asdl_seq* f; - void *keyword; - void *literal; + Token * keyword; + Token * literal; if ( (keyword = _PyPegen_expect_token(p, 511)) && @@ -3230,8 +3230,8 @@ try_stmt_rule(Parser *p) void *el; asdl_seq * ex; void *f; - void *keyword; - void *literal; + Token * keyword; + Token * literal; if ( (keyword = _PyPegen_expect_token(p, 511)) && @@ -3288,8 +3288,8 @@ except_block_rule(Parser *p) { // 'except' expression ['as' target] ':' block asdl_seq* b; expr_ty e; - void *keyword; - void *literal; + Token * keyword; + Token * literal; void *t; if ( (keyword = _PyPegen_expect_token(p, 520)) @@ -3322,8 +3322,8 @@ except_block_rule(Parser *p) } { // 'except' ':' block asdl_seq* b; - void *keyword; - void *literal; + Token * keyword; + Token * literal; if ( (keyword = _PyPegen_expect_token(p, 520)) && @@ -3365,8 +3365,8 @@ finally_block_rule(Parser *p) int mark = p->mark; { // 'finally' ':' block asdl_seq* a; - void *keyword; - void *literal; + Token * keyword; + Token * literal; if ( (keyword = _PyPegen_expect_token(p, 521)) && @@ -3408,7 +3408,7 @@ return_stmt_rule(Parser *p) UNUSED(start_col_offset); // Only used by EXTRA macro { // 'return' star_expressions? 
void *a; - void *keyword; + Token * keyword; if ( (keyword = _PyPegen_expect_token(p, 500)) && @@ -3457,7 +3457,7 @@ raise_stmt_rule(Parser *p) { // 'raise' expression ['from' expression] expr_ty a; void *b; - void *keyword; + Token * keyword; if ( (keyword = _PyPegen_expect_token(p, 501)) && @@ -3484,7 +3484,7 @@ raise_stmt_rule(Parser *p) p->mark = mark; } { // 'raise' - void *keyword; + Token * keyword; if ( (keyword = _PyPegen_expect_token(p, 501)) ) @@ -3576,10 +3576,10 @@ function_def_raw_rule(Parser *p) { // 'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? block void *a; asdl_seq* b; - void *keyword; - void *literal; - void *literal_1; - void *literal_2; + Token * keyword; + Token * literal; + Token * literal_1; + Token * literal_2; expr_ty n; void *params; void *tc; @@ -3622,12 +3622,12 @@ function_def_raw_rule(Parser *p) } { // ASYNC 'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? block void *a; - void *async_var; + Token * async_var; asdl_seq* b; - void *keyword; - void *literal; - void *literal_1; - void *literal_2; + Token * keyword; + Token * literal; + Token * literal_1; + Token * literal_2; expr_ty n; void *params; void *tc; @@ -3688,8 +3688,8 @@ func_type_comment_rule(Parser *p) PyObject* res = NULL; int mark = p->mark; { // NEWLINE TYPE_COMMENT &(NEWLINE INDENT) - void *newline_var; - void *t; + Token * newline_var; + Token * t; if ( (newline_var = _PyPegen_expect_token(p, NEWLINE)) && @@ -3719,7 +3719,7 @@ func_type_comment_rule(Parser *p) p->mark = mark; } { // TYPE_COMMENT - void *type_comment_var; + Token * type_comment_var; if ( (type_comment_var = _PyPegen_expect_token(p, TYPE_COMMENT)) ) @@ -3899,8 +3899,8 @@ slash_no_default_rule(Parser *p) int mark = p->mark; { // param_no_default+ '/' ',' asdl_seq * a; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (a = _loop1_59_rule(p)) && @@ -3920,7 +3920,7 @@ slash_no_default_rule(Parser *p) } { // param_no_default+ '/' &')' 
asdl_seq * a; - void *literal; + Token * literal; if ( (a = _loop1_60_rule(p)) && @@ -3957,8 +3957,8 @@ slash_with_default_rule(Parser *p) { // param_no_default* param_with_default+ '/' ',' asdl_seq * a; asdl_seq * b; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (a = _loop0_61_rule(p)) && @@ -3981,7 +3981,7 @@ slash_with_default_rule(Parser *p) { // param_no_default* param_with_default+ '/' &')' asdl_seq * a; asdl_seq * b; - void *literal; + Token * literal; if ( (a = _loop0_63_rule(p)) && @@ -4022,7 +4022,7 @@ star_etc_rule(Parser *p) arg_ty a; asdl_seq * b; void *c; - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 16)) && @@ -4045,8 +4045,8 @@ star_etc_rule(Parser *p) { // '*' ',' param_maybe_default+ kwds? asdl_seq * b; void *c; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (literal = _PyPegen_expect_token(p, 16)) && @@ -4097,7 +4097,7 @@ kwds_rule(Parser *p) int mark = p->mark; { // '**' param_no_default arg_ty a; - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 35)) && @@ -4129,7 +4129,7 @@ param_no_default_rule(Parser *p) int mark = p->mark; { // param ',' TYPE_COMMENT? arg_ty a; - void *literal; + Token * literal; void *tc; if ( (a = param_rule(p)) @@ -4185,7 +4185,7 @@ param_with_default_rule(Parser *p) { // param default ',' TYPE_COMMENT? arg_ty a; expr_ty c; - void *literal; + Token * literal; void *tc; if ( (a = param_rule(p)) @@ -4248,7 +4248,7 @@ param_maybe_default_rule(Parser *p) { // param default? ',' TYPE_COMMENT? 
arg_ty a; void *c; - void *literal; + Token * literal; void *tc; if ( (a = param_rule(p)) @@ -4356,7 +4356,7 @@ annotation_rule(Parser *p) int mark = p->mark; { // ':' expression expr_ty a; - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 11)) && @@ -4388,7 +4388,7 @@ default_rule(Parser *p) int mark = p->mark; { // '=' expression expr_ty a; - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 22)) && @@ -4502,8 +4502,8 @@ class_def_raw_rule(Parser *p) expr_ty a; void *b; asdl_seq* c; - void *keyword; - void *literal; + Token * keyword; + Token * literal; if ( (keyword = _PyPegen_expect_token(p, 523)) && @@ -4551,9 +4551,9 @@ block_rule(Parser *p) int mark = p->mark; { // NEWLINE INDENT statements DEDENT asdl_seq* a; - void *dedent_var; - void *indent_var; - void *newline_var; + Token * dedent_var; + Token * indent_var; + Token * newline_var; if ( (newline_var = _PyPegen_expect_token(p, NEWLINE)) && @@ -4686,7 +4686,7 @@ star_expressions_rule(Parser *p) } { // star_expression ',' expr_ty a; - void *literal; + Token * literal; if ( (a = star_expression_rule(p)) && @@ -4747,7 +4747,7 @@ star_expression_rule(Parser *p) UNUSED(start_col_offset); // Only used by EXTRA macro { // '*' bitwise_or expr_ty a; - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 16)) && @@ -4840,7 +4840,7 @@ star_named_expression_rule(Parser *p) UNUSED(start_col_offset); // Only used by EXTRA macro { // '*' bitwise_or expr_ty a; - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 16)) && @@ -4900,7 +4900,7 @@ named_expression_rule(Parser *p) { // NAME ':=' expression expr_ty a; expr_ty b; - void *literal; + Token * literal; if ( (a = _PyPegen_name_token(p)) && @@ -5040,7 +5040,7 @@ expressions_rule(Parser *p) } { // expression ',' expr_ty a; - void *literal; + Token * literal; if ( (a = expression_rule(p)) && @@ -5103,8 +5103,8 @@ expression_rule(Parser *p) expr_ty a; expr_ty b; expr_ty c; - 
void *keyword; - void *keyword_1; + Token * keyword; + Token * keyword_1; if ( (a = disjunction_rule(p)) && @@ -5182,8 +5182,8 @@ lambdef_rule(Parser *p) { // 'lambda' lambda_parameters? ':' expression void *a; expr_ty b; - void *keyword; - void *literal; + Token * keyword; + Token * literal; if ( (keyword = _PyPegen_expect_token(p, 524)) && @@ -5345,8 +5345,8 @@ lambda_slash_without_default_rule(Parser *p) int mark = p->mark; { // lambda_plain_names ',' '/' asdl_seq* a; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (a = lambda_plain_names_rule(p)) && @@ -5381,8 +5381,8 @@ lambda_slash_with_default_rule(Parser *p) { // [lambda_plain_names ','] lambda_names_with_default ',' '/' void *a; asdl_seq* b; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (a = _tmp_83_rule(p), 1) && @@ -5423,7 +5423,7 @@ lambda_star_etc_rule(Parser *p) arg_ty a; asdl_seq * b; void *c; - void *literal; + Token * literal; void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( @@ -5450,7 +5450,7 @@ lambda_star_etc_rule(Parser *p) { // '*' lambda_name_with_optional_default+ [',' lambda_kwds] ','? 
asdl_seq * b; void *c; - void *literal; + Token * literal; void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( @@ -5508,7 +5508,7 @@ lambda_name_with_optional_default_rule(Parser *p) { // ',' lambda_plain_name ['=' expression] arg_ty a; void *b; - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 12)) && @@ -5571,7 +5571,7 @@ lambda_name_with_default_rule(Parser *p) int mark = p->mark; { // lambda_plain_name '=' expression expr_ty e; - void *literal; + Token * literal; arg_ty n; if ( (n = lambda_plain_name_rule(p)) @@ -5680,7 +5680,7 @@ lambda_kwds_rule(Parser *p) int mark = p->mark; { // '**' lambda_plain_name arg_ty a; - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 35)) && @@ -5846,7 +5846,7 @@ inversion_rule(Parser *p) UNUSED(start_col_offset); // Only used by EXTRA macro { // 'not' inversion expr_ty a; - void *keyword; + Token * keyword; if ( (keyword = _PyPegen_expect_token(p, 525)) && @@ -6091,7 +6091,7 @@ eq_bitwise_or_rule(Parser *p) int mark = p->mark; { // '==' bitwise_or expr_ty a; - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 27)) && @@ -6155,7 +6155,7 @@ lte_bitwise_or_rule(Parser *p) int mark = p->mark; { // '<=' bitwise_or expr_ty a; - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 29)) && @@ -6187,7 +6187,7 @@ lt_bitwise_or_rule(Parser *p) int mark = p->mark; { // '<' bitwise_or expr_ty a; - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 20)) && @@ -6219,7 +6219,7 @@ gte_bitwise_or_rule(Parser *p) int mark = p->mark; { // '>=' bitwise_or expr_ty a; - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 30)) && @@ -6251,7 +6251,7 @@ gt_bitwise_or_rule(Parser *p) int mark = p->mark; { // '>' bitwise_or expr_ty a; - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 21)) && @@ -6283,8 +6283,8 @@ notin_bitwise_or_rule(Parser *p) int mark = p->mark; { // 
'not' 'in' bitwise_or expr_ty a; - void *keyword; - void *keyword_1; + Token * keyword; + Token * keyword_1; if ( (keyword = _PyPegen_expect_token(p, 525)) && @@ -6318,7 +6318,7 @@ in_bitwise_or_rule(Parser *p) int mark = p->mark; { // 'in' bitwise_or expr_ty a; - void *keyword; + Token * keyword; if ( (keyword = _PyPegen_expect_token(p, 518)) && @@ -6350,8 +6350,8 @@ isnot_bitwise_or_rule(Parser *p) int mark = p->mark; { // 'is' 'not' bitwise_or expr_ty a; - void *keyword; - void *keyword_1; + Token * keyword; + Token * keyword_1; if ( (keyword = _PyPegen_expect_token(p, 526)) && @@ -6385,7 +6385,7 @@ is_bitwise_or_rule(Parser *p) int mark = p->mark; { // 'is' bitwise_or expr_ty a; - void *keyword; + Token * keyword; if ( (keyword = _PyPegen_expect_token(p, 526)) && @@ -6451,7 +6451,7 @@ bitwise_or_raw(Parser *p) { // bitwise_or '|' bitwise_xor expr_ty a; expr_ty b; - void *literal; + Token * literal; if ( (a = bitwise_or_rule(p)) && @@ -6538,7 +6538,7 @@ bitwise_xor_raw(Parser *p) { // bitwise_xor '^' bitwise_and expr_ty a; expr_ty b; - void *literal; + Token * literal; if ( (a = bitwise_xor_rule(p)) && @@ -6625,7 +6625,7 @@ bitwise_and_raw(Parser *p) { // bitwise_and '&' shift_expr expr_ty a; expr_ty b; - void *literal; + Token * literal; if ( (a = bitwise_and_rule(p)) && @@ -6712,7 +6712,7 @@ shift_expr_raw(Parser *p) { // shift_expr '<<' sum expr_ty a; expr_ty b; - void *literal; + Token * literal; if ( (a = shift_expr_rule(p)) && @@ -6741,7 +6741,7 @@ shift_expr_raw(Parser *p) { // shift_expr '>>' sum expr_ty a; expr_ty b; - void *literal; + Token * literal; if ( (a = shift_expr_rule(p)) && @@ -6828,7 +6828,7 @@ sum_raw(Parser *p) { // sum '+' term expr_ty a; expr_ty b; - void *literal; + Token * literal; if ( (a = sum_rule(p)) && @@ -6857,7 +6857,7 @@ sum_raw(Parser *p) { // sum '-' term expr_ty a; expr_ty b; - void *literal; + Token * literal; if ( (a = sum_rule(p)) && @@ -6950,7 +6950,7 @@ term_raw(Parser *p) { // term '*' factor expr_ty a; expr_ty b; - 
void *literal; + Token * literal; if ( (a = term_rule(p)) && @@ -6979,7 +6979,7 @@ term_raw(Parser *p) { // term '/' factor expr_ty a; expr_ty b; - void *literal; + Token * literal; if ( (a = term_rule(p)) && @@ -7008,7 +7008,7 @@ term_raw(Parser *p) { // term '//' factor expr_ty a; expr_ty b; - void *literal; + Token * literal; if ( (a = term_rule(p)) && @@ -7037,7 +7037,7 @@ term_raw(Parser *p) { // term '%' factor expr_ty a; expr_ty b; - void *literal; + Token * literal; if ( (a = term_rule(p)) && @@ -7066,7 +7066,7 @@ term_raw(Parser *p) { // term '@' factor expr_ty a; expr_ty b; - void *literal; + Token * literal; if ( (a = term_rule(p)) && @@ -7129,7 +7129,7 @@ factor_rule(Parser *p) UNUSED(start_col_offset); // Only used by EXTRA macro { // '+' factor expr_ty a; - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 14)) && @@ -7155,7 +7155,7 @@ factor_rule(Parser *p) } { // '-' factor expr_ty a; - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 15)) && @@ -7181,7 +7181,7 @@ factor_rule(Parser *p) } { // '~' factor expr_ty a; - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 31)) && @@ -7242,7 +7242,7 @@ power_rule(Parser *p) { // await_primary '**' factor expr_ty a; expr_ty b; - void *literal; + Token * literal; if ( (a = await_primary_rule(p)) && @@ -7305,7 +7305,7 @@ await_primary_rule(Parser *p) UNUSED(start_col_offset); // Only used by EXTRA macro { // AWAIT primary expr_ty a; - void *await_var; + Token * await_var; if ( (await_var = _PyPegen_expect_token(p, AWAIT)) && @@ -7396,7 +7396,7 @@ primary_raw(Parser *p) { // primary '.' NAME expr_ty a; expr_ty b; - void *literal; + Token * literal; if ( (a = primary_rule(p)) && @@ -7451,8 +7451,8 @@ primary_raw(Parser *p) { // primary '(' arguments? 
')' expr_ty a; void *b; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (a = primary_rule(p)) && @@ -7483,8 +7483,8 @@ primary_raw(Parser *p) { // primary '[' slices ']' expr_ty a; expr_ty b; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (a = primary_rule(p)) && @@ -7615,7 +7615,7 @@ slice_rule(Parser *p) void *a; void *b; void *c; - void *literal; + Token * literal; if ( (a = expression_rule(p), 1) && @@ -7703,7 +7703,7 @@ atom_rule(Parser *p) p->mark = mark; } { // 'True' - void *keyword; + Token * keyword; if ( (keyword = _PyPegen_expect_token(p, 527)) ) @@ -7726,7 +7726,7 @@ atom_rule(Parser *p) p->mark = mark; } { // 'False' - void *keyword; + Token * keyword; if ( (keyword = _PyPegen_expect_token(p, 528)) ) @@ -7749,7 +7749,7 @@ atom_rule(Parser *p) p->mark = mark; } { // 'None' - void *keyword; + Token * keyword; if ( (keyword = _PyPegen_expect_token(p, 529)) ) @@ -7772,7 +7772,7 @@ atom_rule(Parser *p) p->mark = mark; } { // '__new_parser__' - void *keyword; + Token * keyword; if ( (keyword = _PyPegen_expect_token(p, 530)) ) @@ -7850,7 +7850,7 @@ atom_rule(Parser *p) p->mark = mark; } { // '...' - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 52)) ) @@ -7928,8 +7928,8 @@ list_rule(Parser *p) UNUSED(start_col_offset); // Only used by EXTRA macro { // '[' star_named_expressions? ']' void *a; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (literal = _PyPegen_expect_token(p, 9)) && @@ -7980,8 +7980,8 @@ listcomp_rule(Parser *p) { // '[' named_expression for_if_clauses ']' expr_ty a; asdl_seq* b; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (literal = _PyPegen_expect_token(p, 9)) && @@ -8044,8 +8044,8 @@ tuple_rule(Parser *p) UNUSED(start_col_offset); // Only used by EXTRA macro { // '(' [star_named_expression ',' star_named_expressions?] 
')' void *a; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (literal = _PyPegen_expect_token(p, 7)) && @@ -8087,8 +8087,8 @@ group_rule(Parser *p) int mark = p->mark; { // '(' (yield_expr | named_expression) ')' void *a; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (literal = _PyPegen_expect_token(p, 7)) && @@ -8131,8 +8131,8 @@ genexp_rule(Parser *p) { // '(' expression for_if_clauses ')' expr_ty a; asdl_seq* b; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (literal = _PyPegen_expect_token(p, 7)) && @@ -8195,8 +8195,8 @@ set_rule(Parser *p) UNUSED(start_col_offset); // Only used by EXTRA macro { // '{' expressions_list '}' asdl_seq* a; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (literal = _PyPegen_expect_token(p, 25)) && @@ -8247,8 +8247,8 @@ setcomp_rule(Parser *p) { // '{' expression for_if_clauses '}' expr_ty a; asdl_seq* b; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (literal = _PyPegen_expect_token(p, 25)) && @@ -8311,8 +8311,8 @@ dict_rule(Parser *p) UNUSED(start_col_offset); // Only used by EXTRA macro { // '{' kvpairs? 
'}' void *a; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (literal = _PyPegen_expect_token(p, 25)) && @@ -8363,8 +8363,8 @@ dictcomp_rule(Parser *p) { // '{' kvpair for_if_clauses '}' KeyValuePair* a; asdl_seq* b; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (literal = _PyPegen_expect_token(p, 25)) && @@ -8441,7 +8441,7 @@ kvpair_rule(Parser *p) int mark = p->mark; { // '**' bitwise_or expr_ty a; - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 35)) && @@ -8460,7 +8460,7 @@ kvpair_rule(Parser *p) { // expression ':' expression expr_ty a; expr_ty b; - void *literal; + Token * literal; if ( (a = expression_rule(p)) && @@ -8521,11 +8521,11 @@ for_if_clause_rule(Parser *p) int mark = p->mark; { // ASYNC 'for' star_targets 'in' disjunction (('if' disjunction))* expr_ty a; - void *async_var; + Token * async_var; expr_ty b; asdl_seq * c; - void *keyword; - void *keyword_1; + Token * keyword; + Token * keyword_1; if ( (async_var = _PyPegen_expect_token(p, ASYNC)) && @@ -8553,8 +8553,8 @@ for_if_clause_rule(Parser *p) expr_ty a; expr_ty b; asdl_seq * c; - void *keyword; - void *keyword_1; + Token * keyword; + Token * keyword_1; if ( (keyword = _PyPegen_expect_token(p, 517)) && @@ -8600,8 +8600,8 @@ yield_expr_rule(Parser *p) UNUSED(start_col_offset); // Only used by EXTRA macro { // 'yield' 'from' expression expr_ty a; - void *keyword; - void *keyword_1; + Token * keyword; + Token * keyword_1; if ( (keyword = _PyPegen_expect_token(p, 504)) && @@ -8629,7 +8629,7 @@ yield_expr_rule(Parser *p) } { // 'yield' star_expressions? 
void *a; - void *keyword; + Token * keyword; if ( (keyword = _PyPegen_expect_token(p, 504)) && @@ -8819,7 +8819,7 @@ kwargs_rule(Parser *p) { // ','.kwarg_or_starred+ ',' ','.kwarg_or_double_starred+ asdl_seq * a; asdl_seq * b; - void *literal; + Token * literal; if ( (a = _gather_113_rule(p)) && @@ -8883,7 +8883,7 @@ starred_expression_rule(Parser *p) UNUSED(start_col_offset); // Only used by EXTRA macro { // '*' expression expr_ty a; - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 16)) && @@ -8932,7 +8932,7 @@ kwarg_or_starred_rule(Parser *p) { // NAME '=' expression expr_ty a; expr_ty b; - void *literal; + Token * literal; if ( (a = _PyPegen_name_token(p)) && @@ -8998,7 +8998,7 @@ kwarg_or_double_starred_rule(Parser *p) { // NAME '=' expression expr_ty a; expr_ty b; - void *literal; + Token * literal; if ( (a = _PyPegen_name_token(p)) && @@ -9026,7 +9026,7 @@ kwarg_or_double_starred_rule(Parser *p) } { // '**' expression expr_ty a; - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 35)) && @@ -9182,7 +9182,7 @@ star_target_rule(Parser *p) UNUSED(start_col_offset); // Only used by EXTRA macro { // '*' (!'*' star_target) void *a; - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 16)) && @@ -9209,7 +9209,7 @@ star_target_rule(Parser *p) { // t_primary '.' NAME !t_lookahead expr_ty a; expr_ty b; - void *literal; + Token * literal; if ( (a = t_primary_rule(p)) && @@ -9240,8 +9240,8 @@ star_target_rule(Parser *p) { // t_primary '[' slices ']' !t_lookahead expr_ty a; expr_ty b; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (a = t_primary_rule(p)) && @@ -9326,8 +9326,8 @@ star_atom_rule(Parser *p) } { // '(' star_target ')' expr_ty a; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (literal = _PyPegen_expect_token(p, 7)) && @@ -9347,8 +9347,8 @@ star_atom_rule(Parser *p) } { // '(' star_targets_seq? 
')' void *a; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (literal = _PyPegen_expect_token(p, 7)) && @@ -9376,8 +9376,8 @@ star_atom_rule(Parser *p) } { // '[' star_targets_seq? ']' void *a; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (literal = _PyPegen_expect_token(p, 9)) && @@ -9448,8 +9448,8 @@ inside_paren_ann_assign_target_rule(Parser *p) } { // '(' inside_paren_ann_assign_target ')' expr_ty a; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (literal = _PyPegen_expect_token(p, 7)) && @@ -9494,7 +9494,7 @@ ann_assign_subscript_attribute_target_rule(Parser *p) { // t_primary '.' NAME !t_lookahead expr_ty a; expr_ty b; - void *literal; + Token * literal; if ( (a = t_primary_rule(p)) && @@ -9525,8 +9525,8 @@ ann_assign_subscript_attribute_target_rule(Parser *p) { // t_primary '[' slices ']' !t_lookahead expr_ty a; expr_ty b; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (a = t_primary_rule(p)) && @@ -9619,7 +9619,7 @@ del_target_rule(Parser *p) { // t_primary '.' NAME !t_lookahead expr_ty a; expr_ty b; - void *literal; + Token * literal; if ( (a = t_primary_rule(p)) && @@ -9650,8 +9650,8 @@ del_target_rule(Parser *p) { // t_primary '[' slices ']' !t_lookahead expr_ty a; expr_ty b; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (a = t_primary_rule(p)) && @@ -9732,8 +9732,8 @@ del_t_atom_rule(Parser *p) } { // '(' del_target ')' expr_ty a; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (literal = _PyPegen_expect_token(p, 7)) && @@ -9753,8 +9753,8 @@ del_t_atom_rule(Parser *p) } { // '(' del_targets? ')' void *a; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (literal = _PyPegen_expect_token(p, 7)) && @@ -9782,8 +9782,8 @@ del_t_atom_rule(Parser *p) } { // '[' del_targets? 
']' void *a; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (literal = _PyPegen_expect_token(p, 9)) && @@ -9872,7 +9872,7 @@ target_rule(Parser *p) { // t_primary '.' NAME !t_lookahead expr_ty a; expr_ty b; - void *literal; + Token * literal; if ( (a = t_primary_rule(p)) && @@ -9903,8 +9903,8 @@ target_rule(Parser *p) { // t_primary '[' slices ']' !t_lookahead expr_ty a; expr_ty b; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (a = t_primary_rule(p)) && @@ -10001,7 +10001,7 @@ t_primary_raw(Parser *p) { // t_primary '.' NAME &t_lookahead expr_ty a; expr_ty b; - void *literal; + Token * literal; if ( (a = t_primary_rule(p)) && @@ -10032,8 +10032,8 @@ t_primary_raw(Parser *p) { // t_primary '[' slices ']' &t_lookahead expr_ty a; expr_ty b; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (a = t_primary_rule(p)) && @@ -10094,8 +10094,8 @@ t_primary_raw(Parser *p) { // t_primary '(' arguments? ')' &t_lookahead expr_ty a; void *b; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (a = t_primary_rule(p)) && @@ -10157,7 +10157,7 @@ t_lookahead_rule(Parser *p) void * res = NULL; int mark = p->mark; { // '(' - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 7)) ) @@ -10168,7 +10168,7 @@ t_lookahead_rule(Parser *p) p->mark = mark; } { // '[' - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 9)) ) @@ -10179,7 +10179,7 @@ t_lookahead_rule(Parser *p) p->mark = mark; } { // '.' - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 23)) ) @@ -10228,8 +10228,8 @@ t_atom_rule(Parser *p) } { // '(' target ')' expr_ty a; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (literal = _PyPegen_expect_token(p, 7)) && @@ -10249,8 +10249,8 @@ t_atom_rule(Parser *p) } { // '(' targets? 
')' void *b; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (literal = _PyPegen_expect_token(p, 7)) && @@ -10278,8 +10278,8 @@ t_atom_rule(Parser *p) } { // '[' targets? ']' void *b; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (literal = _PyPegen_expect_token(p, 9)) && @@ -10324,8 +10324,8 @@ incorrect_arguments_rule(Parser *p) int mark = p->mark; { // args ',' '*' expr_ty args_var; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (args_var = args_rule(p)) && @@ -10346,7 +10346,7 @@ incorrect_arguments_rule(Parser *p) { // expression for_if_clauses ',' [args | expression for_if_clauses] expr_ty expression_var; asdl_seq* for_if_clauses_var; - void *literal; + Token * literal; void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( @@ -10371,7 +10371,7 @@ incorrect_arguments_rule(Parser *p) { // args ',' args expr_ty a; expr_ty args_var; - void *literal; + Token * literal; if ( (a = args_rule(p)) && @@ -10406,7 +10406,7 @@ invalid_named_expression_rule(Parser *p) { // expression ':=' expression expr_ty a; expr_ty expression_var; - void *literal; + Token * literal; if ( (a = expression_rule(p)) && @@ -10444,7 +10444,7 @@ invalid_assignment_rule(Parser *p) int mark = p->mark; { // list ':' expr_ty list_var; - void *literal; + Token * literal; if ( (list_var = list_rule(p)) && @@ -10461,7 +10461,7 @@ invalid_assignment_rule(Parser *p) p->mark = mark; } { // tuple ':' - void *literal; + Token * literal; expr_ty tuple_var; if ( (tuple_var = tuple_rule(p)) @@ -10481,7 +10481,7 @@ invalid_assignment_rule(Parser *p) { // expression ':' expression ['=' annotated_rhs] expr_ty expression_var; expr_ty expression_var_1; - void *literal; + Token * literal; void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( @@ -10539,7 +10539,7 @@ invalid_block_rule(Parser *p) void * res = NULL; int mark = p->mark; { // NEWLINE !INDENT - void *newline_var; + Token * 
newline_var; if ( (newline_var = _PyPegen_expect_token(p, NEWLINE)) && @@ -10573,7 +10573,7 @@ invalid_comprehension_rule(Parser *p) void *_tmp_133_var; expr_ty expression_var; asdl_seq* for_if_clauses_var; - void *literal; + Token * literal; if ( (_tmp_133_var = _tmp_133_rule(p)) && @@ -10644,11 +10644,11 @@ invalid_double_type_comments_rule(Parser *p) void * res = NULL; int mark = p->mark; { // TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT - void *indent_var; - void *newline_var; - void *newline_var_1; - void *type_comment_var; - void *type_comment_var_1; + Token * indent_var; + Token * newline_var; + Token * newline_var_1; + Token * type_comment_var; + Token * type_comment_var_1; if ( (type_comment_var = _PyPegen_expect_token(p, TYPE_COMMENT)) && @@ -10693,7 +10693,7 @@ _loop0_1_rule(Parser *p) ssize_t children_capacity = 1; ssize_t n = 0; { // NEWLINE - void *newline_var; + Token * newline_var; while ( (newline_var = _PyPegen_expect_token(p, NEWLINE)) ) @@ -10742,7 +10742,7 @@ _loop0_2_rule(Parser *p) ssize_t children_capacity = 1; ssize_t n = 0; { // NEWLINE - void *newline_var; + Token * newline_var; while ( (newline_var = _PyPegen_expect_token(p, NEWLINE)) ) @@ -10792,7 +10792,7 @@ _loop0_4_rule(Parser *p) ssize_t n = 0; { // ',' expression expr_ty elem; - void *literal; + Token * literal; while ( (literal = _PyPegen_expect_token(p, 12)) && @@ -10877,7 +10877,7 @@ _loop0_6_rule(Parser *p) ssize_t n = 0; { // ',' expression expr_ty elem; - void *literal; + Token * literal; while ( (literal = _PyPegen_expect_token(p, 12)) && @@ -10962,7 +10962,7 @@ _loop0_8_rule(Parser *p) ssize_t n = 0; { // ',' expression expr_ty elem; - void *literal; + Token * literal; while ( (literal = _PyPegen_expect_token(p, 12)) && @@ -11047,7 +11047,7 @@ _loop0_10_rule(Parser *p) ssize_t n = 0; { // ',' expression expr_ty elem; - void *literal; + Token * literal; while ( (literal = _PyPegen_expect_token(p, 12)) && @@ -11185,7 +11185,7 @@ _loop0_13_rule(Parser *p) ssize_t n = 0; { 
// ';' small_stmt stmt_ty elem; - void *literal; + Token * literal; while ( (literal = _PyPegen_expect_token(p, 13)) && @@ -11261,7 +11261,7 @@ _tmp_14_rule(Parser *p) void * res = NULL; int mark = p->mark; { // 'import' - void *keyword; + Token * keyword; if ( (keyword = _PyPegen_expect_token(p, 513)) ) @@ -11272,7 +11272,7 @@ _tmp_14_rule(Parser *p) p->mark = mark; } { // 'from' - void *keyword; + Token * keyword; if ( (keyword = _PyPegen_expect_token(p, 514)) ) @@ -11297,7 +11297,7 @@ _tmp_15_rule(Parser *p) void * res = NULL; int mark = p->mark; { // 'def' - void *keyword; + Token * keyword; if ( (keyword = _PyPegen_expect_token(p, 522)) ) @@ -11308,7 +11308,7 @@ _tmp_15_rule(Parser *p) p->mark = mark; } { // '@' - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 49)) ) @@ -11319,7 +11319,7 @@ _tmp_15_rule(Parser *p) p->mark = mark; } { // ASYNC - void *async_var; + Token * async_var; if ( (async_var = _PyPegen_expect_token(p, ASYNC)) ) @@ -11344,7 +11344,7 @@ _tmp_16_rule(Parser *p) void * res = NULL; int mark = p->mark; { // 'class' - void *keyword; + Token * keyword; if ( (keyword = _PyPegen_expect_token(p, 523)) ) @@ -11355,7 +11355,7 @@ _tmp_16_rule(Parser *p) p->mark = mark; } { // '@' - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 49)) ) @@ -11380,7 +11380,7 @@ _tmp_17_rule(Parser *p) void * res = NULL; int mark = p->mark; { // 'with' - void *keyword; + Token * keyword; if ( (keyword = _PyPegen_expect_token(p, 519)) ) @@ -11391,7 +11391,7 @@ _tmp_17_rule(Parser *p) p->mark = mark; } { // ASYNC - void *async_var; + Token * async_var; if ( (async_var = _PyPegen_expect_token(p, ASYNC)) ) @@ -11416,7 +11416,7 @@ _tmp_18_rule(Parser *p) void * res = NULL; int mark = p->mark; { // 'for' - void *keyword; + Token * keyword; if ( (keyword = _PyPegen_expect_token(p, 517)) ) @@ -11427,7 +11427,7 @@ _tmp_18_rule(Parser *p) p->mark = mark; } { // ASYNC - void *async_var; + Token * async_var; if ( (async_var = 
_PyPegen_expect_token(p, ASYNC)) ) @@ -11453,7 +11453,7 @@ _tmp_19_rule(Parser *p) int mark = p->mark; { // '=' annotated_rhs expr_ty d; - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 22)) && @@ -11485,8 +11485,8 @@ _tmp_20_rule(Parser *p) int mark = p->mark; { // '(' inside_paren_ann_assign_target ')' expr_ty b; - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; if ( (literal = _PyPegen_expect_token(p, 7)) && @@ -11531,7 +11531,7 @@ _tmp_21_rule(Parser *p) int mark = p->mark; { // '=' annotated_rhs expr_ty d; - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 22)) && @@ -11696,7 +11696,7 @@ _loop0_26_rule(Parser *p) ssize_t n = 0; { // ',' NAME expr_ty elem; - void *literal; + Token * literal; while ( (literal = _PyPegen_expect_token(p, 12)) && @@ -11781,7 +11781,7 @@ _loop0_28_rule(Parser *p) ssize_t n = 0; { // ',' NAME expr_ty elem; - void *literal; + Token * literal; while ( (literal = _PyPegen_expect_token(p, 12)) && @@ -11857,7 +11857,7 @@ _tmp_29_rule(Parser *p) void * res = NULL; int mark = p->mark; { // ',' expression - void *literal; + Token * literal; expr_ty z; if ( (literal = _PyPegen_expect_token(p, 12)) @@ -12000,7 +12000,7 @@ _loop0_33_rule(Parser *p) ssize_t n = 0; { // ',' import_from_as_name alias_ty elem; - void *literal; + Token * literal; while ( (literal = _PyPegen_expect_token(p, 12)) && @@ -12076,7 +12076,7 @@ _tmp_34_rule(Parser *p) void * res = NULL; int mark = p->mark; { // 'as' NAME - void *keyword; + Token * keyword; expr_ty z; if ( (keyword = _PyPegen_expect_token(p, 531)) @@ -12117,7 +12117,7 @@ _loop0_36_rule(Parser *p) ssize_t n = 0; { // ',' dotted_as_name alias_ty elem; - void *literal; + Token * literal; while ( (literal = _PyPegen_expect_token(p, 12)) && @@ -12193,7 +12193,7 @@ _tmp_37_rule(Parser *p) void * res = NULL; int mark = p->mark; { // 'as' NAME - void *keyword; + Token * keyword; expr_ty z; if ( (keyword = _PyPegen_expect_token(p, 531)) 
@@ -12234,7 +12234,7 @@ _loop0_39_rule(Parser *p) ssize_t n = 0; { // ',' with_item withitem_ty elem; - void *literal; + Token * literal; while ( (literal = _PyPegen_expect_token(p, 12)) && @@ -12319,7 +12319,7 @@ _loop0_41_rule(Parser *p) ssize_t n = 0; { // ',' with_item withitem_ty elem; - void *literal; + Token * literal; while ( (literal = _PyPegen_expect_token(p, 12)) && @@ -12404,7 +12404,7 @@ _loop0_43_rule(Parser *p) ssize_t n = 0; { // ',' with_item withitem_ty elem; - void *literal; + Token * literal; while ( (literal = _PyPegen_expect_token(p, 12)) && @@ -12489,7 +12489,7 @@ _loop0_45_rule(Parser *p) ssize_t n = 0; { // ',' with_item withitem_ty elem; - void *literal; + Token * literal; while ( (literal = _PyPegen_expect_token(p, 12)) && @@ -12565,7 +12565,7 @@ _tmp_46_rule(Parser *p) void * res = NULL; int mark = p->mark; { // 'as' target - void *keyword; + Token * keyword; expr_ty t; if ( (keyword = _PyPegen_expect_token(p, 531)) @@ -12650,7 +12650,7 @@ _tmp_48_rule(Parser *p) void * res = NULL; int mark = p->mark; { // 'as' target - void *keyword; + Token * keyword; expr_ty z; if ( (keyword = _PyPegen_expect_token(p, 531)) @@ -12682,7 +12682,7 @@ _tmp_49_rule(Parser *p) void * res = NULL; int mark = p->mark; { // 'from' expression - void *keyword; + Token * keyword; expr_ty z; if ( (keyword = _PyPegen_expect_token(p, 514)) @@ -12714,7 +12714,7 @@ _tmp_50_rule(Parser *p) void * res = NULL; int mark = p->mark; { // '->' expression - void *literal; + Token * literal; expr_ty z; if ( (literal = _PyPegen_expect_token(p, 51)) @@ -12746,7 +12746,7 @@ _tmp_51_rule(Parser *p) void * res = NULL; int mark = p->mark; { // '->' expression - void *literal; + Token * literal; expr_ty z; if ( (literal = _PyPegen_expect_token(p, 51)) @@ -12778,8 +12778,8 @@ _tmp_52_rule(Parser *p) void * res = NULL; int mark = p->mark; { // NEWLINE INDENT - void *indent_var; - void *newline_var; + Token * indent_var; + Token * newline_var; if ( (newline_var = _PyPegen_expect_token(p, 
NEWLINE)) && @@ -13573,8 +13573,8 @@ _tmp_68_rule(Parser *p) void * res = NULL; int mark = p->mark; { // '(' arguments? ')' - void *literal; - void *literal_1; + Token * literal; + Token * literal_1; void *z; if ( (literal = _PyPegen_expect_token(p, 7)) @@ -13617,7 +13617,7 @@ _loop0_70_rule(Parser *p) ssize_t n = 0; { // ',' star_expression expr_ty elem; - void *literal; + Token * literal; while ( (literal = _PyPegen_expect_token(p, 12)) && @@ -13755,7 +13755,7 @@ _loop0_73_rule(Parser *p) ssize_t n = 0; { // ',' star_named_expression expr_ty elem; - void *literal; + Token * literal; while ( (literal = _PyPegen_expect_token(p, 12)) && @@ -13884,7 +13884,7 @@ _tmp_75_rule(Parser *p) void * res = NULL; int mark = p->mark; { // ',' lambda_plain_names - void *literal; + Token * literal; asdl_seq* x; if ( (literal = _PyPegen_expect_token(p, 12)) @@ -13916,7 +13916,7 @@ _tmp_76_rule(Parser *p) void * res = NULL; int mark = p->mark; { // ',' lambda_names_with_default - void *literal; + Token * literal; asdl_seq* y; if ( (literal = _PyPegen_expect_token(p, 12)) @@ -13948,7 +13948,7 @@ _tmp_77_rule(Parser *p) void * res = NULL; int mark = p->mark; { // ',' lambda_star_etc? - void *literal; + Token * literal; void *z; if ( (literal = _PyPegen_expect_token(p, 12)) @@ -13980,7 +13980,7 @@ _tmp_78_rule(Parser *p) void * res = NULL; int mark = p->mark; { // ',' lambda_names_with_default - void *literal; + Token * literal; asdl_seq* y; if ( (literal = _PyPegen_expect_token(p, 12)) @@ -14012,7 +14012,7 @@ _tmp_79_rule(Parser *p) void * res = NULL; int mark = p->mark; { // ',' lambda_star_etc? 
- void *literal; + Token * literal; void *z; if ( (literal = _PyPegen_expect_token(p, 12)) @@ -14044,7 +14044,7 @@ _tmp_80_rule(Parser *p) void * res = NULL; int mark = p->mark; { // ',' lambda_names_with_default - void *literal; + Token * literal; asdl_seq* y; if ( (literal = _PyPegen_expect_token(p, 12)) @@ -14076,7 +14076,7 @@ _tmp_81_rule(Parser *p) void * res = NULL; int mark = p->mark; { // ',' lambda_star_etc? - void *literal; + Token * literal; void *z; if ( (literal = _PyPegen_expect_token(p, 12)) @@ -14108,7 +14108,7 @@ _tmp_82_rule(Parser *p) void * res = NULL; int mark = p->mark; { // ',' lambda_star_etc? - void *literal; + Token * literal; void *z; if ( (literal = _PyPegen_expect_token(p, 12)) @@ -14140,7 +14140,7 @@ _tmp_83_rule(Parser *p) void * res = NULL; int mark = p->mark; { // lambda_plain_names ',' - void *literal; + Token * literal; asdl_seq* n; if ( (n = lambda_plain_names_rule(p)) @@ -14222,7 +14222,7 @@ _tmp_85_rule(Parser *p) int mark = p->mark; { // ',' lambda_kwds arg_ty d; - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 12)) && @@ -14307,7 +14307,7 @@ _tmp_87_rule(Parser *p) int mark = p->mark; { // ',' lambda_kwds arg_ty d; - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 12)) && @@ -14339,7 +14339,7 @@ _tmp_88_rule(Parser *p) int mark = p->mark; { // '=' expression expr_ty e; - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 22)) && @@ -14379,7 +14379,7 @@ _loop0_90_rule(Parser *p) ssize_t n = 0; { // ',' lambda_name_with_default NameDefaultPair* elem; - void *literal; + Token * literal; while ( (literal = _PyPegen_expect_token(p, 12)) && @@ -14464,7 +14464,7 @@ _loop0_92_rule(Parser *p) ssize_t n = 0; { // ',' (lambda_plain_name !'=') void *elem; - void *literal; + Token * literal; while ( (literal = _PyPegen_expect_token(p, 12)) && @@ -14699,7 +14699,7 @@ _tmp_96_rule(Parser *p) void * res = NULL; int mark = p->mark; { // '!=' - void *tok; + Token * 
tok; if ( (tok = _PyPegen_expect_token(p, 28)) ) @@ -14737,7 +14737,7 @@ _loop0_98_rule(Parser *p) ssize_t n = 0; { // ',' slice expr_ty elem; - void *literal; + Token * literal; while ( (literal = _PyPegen_expect_token(p, 12)) && @@ -14814,7 +14814,7 @@ _tmp_99_rule(Parser *p) int mark = p->mark; { // ':' expression? void *d; - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 11)) && @@ -15039,7 +15039,7 @@ _tmp_104_rule(Parser *p) void * res = NULL; int mark = p->mark; { // star_named_expression ',' star_named_expressions? - void *literal; + Token * literal; expr_ty y; void *z; if ( @@ -15119,7 +15119,7 @@ _loop0_107_rule(Parser *p) ssize_t n = 0; { // ',' kvpair KeyValuePair* elem; - void *literal; + Token * literal; while ( (literal = _PyPegen_expect_token(p, 12)) && @@ -15347,7 +15347,7 @@ _tmp_111_rule(Parser *p) int mark = p->mark; { // ',' args expr_ty c; - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 12)) && @@ -15379,7 +15379,7 @@ _tmp_112_rule(Parser *p) int mark = p->mark; { // ',' args expr_ty c; - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 12)) && @@ -15419,7 +15419,7 @@ _loop0_114_rule(Parser *p) ssize_t n = 0; { // ',' kwarg_or_starred KeywordOrStarred* elem; - void *literal; + Token * literal; while ( (literal = _PyPegen_expect_token(p, 12)) && @@ -15504,7 +15504,7 @@ _loop0_116_rule(Parser *p) ssize_t n = 0; { // ',' kwarg_or_double_starred KeywordOrStarred* elem; - void *literal; + Token * literal; while ( (literal = _PyPegen_expect_token(p, 12)) && @@ -15589,7 +15589,7 @@ _loop0_118_rule(Parser *p) ssize_t n = 0; { // ',' kwarg_or_starred KeywordOrStarred* elem; - void *literal; + Token * literal; while ( (literal = _PyPegen_expect_token(p, 12)) && @@ -15674,7 +15674,7 @@ _loop0_120_rule(Parser *p) ssize_t n = 0; { // ',' kwarg_or_double_starred KeywordOrStarred* elem; - void *literal; + Token * literal; while ( (literal = _PyPegen_expect_token(p, 12)) && @@ 
-15808,7 +15808,7 @@ _loop0_123_rule(Parser *p) ssize_t n = 0; { // ',' star_target expr_ty elem; - void *literal; + Token * literal; while ( (literal = _PyPegen_expect_token(p, 12)) && @@ -15920,7 +15920,7 @@ _loop0_126_rule(Parser *p) ssize_t n = 0; { // ',' del_target expr_ty elem; - void *literal; + Token * literal; while ( (literal = _PyPegen_expect_token(p, 12)) && @@ -16005,7 +16005,7 @@ _loop0_128_rule(Parser *p) ssize_t n = 0; { // ',' target expr_ty elem; - void *literal; + Token * literal; while ( (literal = _PyPegen_expect_token(p, 12)) && @@ -16121,7 +16121,7 @@ _tmp_130_rule(Parser *p) int mark = p->mark; { // '=' annotated_rhs expr_ty annotated_rhs_var; - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 22)) && @@ -16148,7 +16148,7 @@ _tmp_131_rule(Parser *p) void * res = NULL; int mark = p->mark; { // '=' - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 22)) ) @@ -16220,7 +16220,7 @@ _tmp_133_rule(Parser *p) void * res = NULL; int mark = p->mark; { // '[' - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 9)) ) @@ -16231,7 +16231,7 @@ _tmp_133_rule(Parser *p) p->mark = mark; } { // '(' - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 7)) ) @@ -16242,7 +16242,7 @@ _tmp_133_rule(Parser *p) p->mark = mark; } { // '{' - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 25)) ) @@ -16352,7 +16352,7 @@ _tmp_136_rule(Parser *p) void * res = NULL; int mark = p->mark; { // star_targets '=' - void *literal; + Token * literal; expr_ty z; if ( (z = star_targets_rule(p)) @@ -16384,7 +16384,7 @@ _tmp_137_rule(Parser *p) void * res = NULL; int mark = p->mark; { // '.' - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 23)) ) @@ -16395,7 +16395,7 @@ _tmp_137_rule(Parser *p) p->mark = mark; } { // '...' 
- void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 52)) ) @@ -16420,7 +16420,7 @@ _tmp_138_rule(Parser *p) void * res = NULL; int mark = p->mark; { // '.' - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 23)) ) @@ -16431,7 +16431,7 @@ _tmp_138_rule(Parser *p) p->mark = mark; } { // '...' - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 52)) ) @@ -16457,8 +16457,8 @@ _tmp_139_rule(Parser *p) int mark = p->mark; { // '@' named_expression NEWLINE expr_ty f; - void *literal; - void *newline_var; + Token * literal; + Token * newline_var; if ( (literal = _PyPegen_expect_token(p, 49)) && @@ -16492,7 +16492,7 @@ _tmp_140_rule(Parser *p) int mark = p->mark; { // ',' star_expression expr_ty c; - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 12)) && @@ -16524,7 +16524,7 @@ _tmp_141_rule(Parser *p) int mark = p->mark; { // ',' expression expr_ty c; - void *literal; + Token * literal; if ( (literal = _PyPegen_expect_token(p, 12)) && @@ -16583,7 +16583,7 @@ _tmp_143_rule(Parser *p) int mark = p->mark; { // 'or' conjunction expr_ty c; - void *keyword; + Token * keyword; if ( (keyword = _PyPegen_expect_token(p, 532)) && @@ -16615,7 +16615,7 @@ _tmp_144_rule(Parser *p) int mark = p->mark; { // 'and' inversion expr_ty c; - void *keyword; + Token * keyword; if ( (keyword = _PyPegen_expect_token(p, 533)) && @@ -16646,7 +16646,7 @@ _tmp_145_rule(Parser *p) void * res = NULL; int mark = p->mark; { // 'if' disjunction - void *keyword; + Token * keyword; expr_ty z; if ( (keyword = _PyPegen_expect_token(p, 510)) @@ -16678,7 +16678,7 @@ _tmp_146_rule(Parser *p) void * res = NULL; int mark = p->mark; { // 'if' disjunction - void *keyword; + Token * keyword; expr_ty z; if ( (keyword = _PyPegen_expect_token(p, 510)) @@ -16711,7 +16711,7 @@ _tmp_147_rule(Parser *p) int mark = p->mark; { // ',' star_target expr_ty c; - void *literal; + Token * literal; if ( (literal = 
_PyPegen_expect_token(p, 12)) && diff --git a/Tools/peg_generator/pegen/c_generator.py b/Tools/peg_generator/pegen/c_generator.py index a59da2ffae8e1..6c77f43991bbe 100644 --- a/Tools/peg_generator/pegen/c_generator.py +++ b/Tools/peg_generator/pegen/c_generator.py @@ -65,9 +65,9 @@ class FunctionCall: function: str arguments: Optional[List[Any]] = None assigned_variable: Optional[str] = None + return_type: Optional[str] = None nodetype: Optional[NodeTypes] = None force_true: bool = False - metadata: Dict[str, Any] = field(default_factory=dict) def __str__(self) -> str: parts = [] @@ -101,6 +101,7 @@ def keyword_helper(self, keyword: str) -> FunctionCall: assigned_variable="keyword", function="_PyPegen_expect_token", arguments=["p", self.keyword_cache[keyword]], + return_type="Token *", nodetype=NodeTypes.KEYWORD, ) @@ -113,21 +114,26 @@ def visit_NameLeaf(self, node: NameLeaf) -> FunctionCall: function=f"_PyPegen_{name.lower()}_token", arguments=["p"], nodetype=BASE_NODETYPES[name], - metadata={"rulename": name.lower()}, + return_type="expr_ty", ) return FunctionCall( assigned_variable=f"{name.lower()}_var", function=f"_PyPegen_expect_token", arguments=["p", name], nodetype=NodeTypes.GENERIC_TOKEN, - metadata={"rulename": name.lower()}, + return_type="Token *", ) + type = None + rule = self.gen.all_rules.get(name.lower()) + if rule is not None: + type = "asdl_seq *" if rule.is_loop() or rule.is_gather() else rule.type + return FunctionCall( assigned_variable=f"{name}_var", function=f"{name}_rule", arguments=["p"], - metadata={"rulename": name.lower()}, + return_type=type, ) def visit_StringLeaf(self, node: StringLeaf) -> FunctionCall: @@ -142,6 +148,7 @@ def visit_StringLeaf(self, node: StringLeaf) -> FunctionCall: function=f"_PyPegen_expect_token", arguments=["p", type], nodetype=NodeTypes.GENERIC_TOKEN, + return_type="Token *", ) def visit_Rhs(self, node: Rhs) -> FunctionCall: @@ -160,10 +167,7 @@ def can_we_inline(node: Rhs) -> int: else: name = 
self.gen.name_node(node) self.cache[node] = FunctionCall( - assigned_variable=f"{name}_var", - function=f"{name}_rule", - arguments=["p"], - metadata={"rulename": name}, + assigned_variable=f"{name}_var", function=f"{name}_rule", arguments=["p"], ) return self.cache[node] @@ -179,16 +183,19 @@ def lookahead_call_helper(self, node: Lookahead, positive: int) -> FunctionCall: return FunctionCall( function=f"_PyPegen_lookahead_with_name", arguments=[positive, call.function, *call.arguments], + return_type="int", ) elif call.nodetype in {NodeTypes.GENERIC_TOKEN, NodeTypes.KEYWORD}: return FunctionCall( function=f"_PyPegen_lookahead_with_int", arguments=[positive, call.function, *call.arguments], + return_type="int", ) else: return FunctionCall( function=f"_PyPegen_lookahead", arguments=[positive, call.function, *call.arguments], + return_type="int", ) def visit_PositiveLookahead(self, node: PositiveLookahead) -> FunctionCall: @@ -214,7 +221,7 @@ def visit_Repeat0(self, node: Repeat0) -> FunctionCall: assigned_variable=f"{name}_var", function=f"{name}_rule", arguments=["p"], - metadata={"rulename": name}, + return_type="asdl_seq *", ) return self.cache[node] @@ -226,7 +233,7 @@ def visit_Repeat1(self, node: Repeat1) -> FunctionCall: assigned_variable=f"{name}_var", function=f"{name}_rule", arguments=["p"], - metadata={"rulename": name}, + return_type="asdl_seq *", ) return self.cache[node] @@ -238,7 +245,7 @@ def visit_Gather(self, node: Gather) -> FunctionCall: assigned_variable=f"{name}_var", function=f"{name}_rule", arguments=["p"], - metadata={"rulename": name}, + return_type="asdl_seq *", ) return self.cache[node] @@ -247,7 +254,10 @@ def visit_Group(self, node: Group) -> FunctionCall: def visit_Cut(self, node: Cut) -> FunctionCall: return FunctionCall( - assigned_variable="cut_var", function="1", nodetype=NodeTypes.CUT_OPERATOR + assigned_variable="cut_var", + return_type="int", + function="1", + nodetype=NodeTypes.CUT_OPERATOR, ) @@ -701,24 +711,4 @@ def 
collect_vars(self, node: Alt) -> Dict[Optional[str], Optional[str]]: def add_var(self, node: NamedItem) -> Tuple[Optional[str], Optional[str]]: call = self.callmakervisitor.visit(node.item) - if not call.assigned_variable: - return None, None - if call.nodetype == NodeTypes.CUT_OPERATOR: - return call.assigned_variable, "int" - - name = call.assigned_variable - rulename = call.metadata.get("rulename") - - type: Optional[str] = None - - assert self.all_rules is not None - if rulename and rulename in self.all_rules: - rule = self.all_rules.get(rulename) - if rule.is_loop() or rule.is_gather(): - type = "asdl_seq *" - else: - type = rule.type - elif call.nodetype in BASE_NODETYPES.values(): - type = "expr_ty" - - return self.dedupe(node.name if node.name else call.assigned_variable), type + return self.dedupe(node.name if node.name else call.assigned_variable), call.return_type diff --git a/Tools/peg_generator/pegen/parser_generator.py b/Tools/peg_generator/pegen/parser_generator.py index 3f6cdbe409d56..b92df2267762d 100644 --- a/Tools/peg_generator/pegen/parser_generator.py +++ b/Tools/peg_generator/pegen/parser_generator.py @@ -47,7 +47,7 @@ def __init__(self, grammar: Grammar, file: Optional[IO[Text]]): self.todo = self.rules.copy() # Rules to generate self.counter = 0 # For name_rule()/name_loop() self.keyword_counter = 499 # For keyword_type() - self.all_rules: Optional[Dict[str, Rule]] = None # Rules + temporal rules + self.all_rules: Dict[str, Rule] = {} # Rules + temporal rules self._local_variable_stack: List[List[str]] = [] @contextlib.contextmanager @@ -87,13 +87,13 @@ def collect_todo(self) -> None: done: Set[str] = set() while True: alltodo = list(self.todo) + self.all_rules.update(self.todo) todo = [i for i in alltodo if i not in done] if not todo: break for rulename in todo: self.todo[rulename].collect_todo(self) done = set(alltodo) - self.all_rules = self.todo.copy() def keyword_type(self) -> int: self.keyword_counter += 1 From webhook-mailer at 
python.org Fri May 1 08:15:39 2020 From: webhook-mailer at python.org (Dong-hee Na) Date: Fri, 01 May 2020 12:15:39 -0000 Subject: [Python-checkins] bpo-32494: Use gdbm_count for dbm_length if possible (GH-19814) Message-ID: https://github.com/python/cpython/commit/8727664557cd44dcd00612ccba816942e8f885ab commit: 8727664557cd44dcd00612ccba816942e8f885ab branch: master author: Dong-hee Na committer: GitHub date: 2020-05-01T14:15:35+02:00 summary: bpo-32494: Use gdbm_count for dbm_length if possible (GH-19814) files: A Misc/NEWS.d/next/Library/2020-04-30-22-25-08.bpo-32494.1xaU5l.rst M Modules/_gdbmmodule.c diff --git a/Misc/NEWS.d/next/Library/2020-04-30-22-25-08.bpo-32494.1xaU5l.rst b/Misc/NEWS.d/next/Library/2020-04-30-22-25-08.bpo-32494.1xaU5l.rst new file mode 100644 index 0000000000000..3989700c5cd83 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-04-30-22-25-08.bpo-32494.1xaU5l.rst @@ -0,0 +1,2 @@ +Update :mod:`dbm.gnu` to use gdbm_count if possible when calling +:func:`len`. Patch by Dong-hee Na. 
diff --git a/Modules/_gdbmmodule.c b/Modules/_gdbmmodule.c index 7a9649b54119b..dd4c6b16f745c 100644 --- a/Modules/_gdbmmodule.c +++ b/Modules/_gdbmmodule.c @@ -36,7 +36,7 @@ values() methods are not supported."); typedef struct { PyObject_HEAD - int di_size; /* -1 means recompute */ + Py_ssize_t di_size; /* -1 means recompute */ GDBM_FILE di_dbm; } dbmobject; @@ -102,19 +102,39 @@ dbm_length(dbmobject *dp) return -1; } if (dp->di_size < 0) { +#if GDBM_VERSION_MAJOR >= 1 && GDBM_VERSION_MINOR >= 11 + errno = 0; + gdbm_count_t count; + if (gdbm_count(dp->di_dbm, &count) == -1) { + if (errno != 0) { + PyErr_SetFromErrno(DbmError); + } + else { + PyErr_SetString(DbmError, gdbm_strerror(gdbm_errno)); + } + return -1; + } + if (count > PY_SSIZE_T_MAX) { + PyErr_SetString(PyExc_OverflowError, "count exceeds PY_SSIZE_T_MAX"); + return -1; + } + dp->di_size = count; +#else datum key,okey; - int size; okey.dsize=0; okey.dptr=NULL; - size = 0; - for (key=gdbm_firstkey(dp->di_dbm); key.dptr; + Py_ssize_t size = 0; + for (key = gdbm_firstkey(dp->di_dbm); key.dptr; key = gdbm_nextkey(dp->di_dbm,okey)) { size++; - if(okey.dsize) free(okey.dptr); + if (okey.dsize) { + free(okey.dptr); + } okey=key; } dp->di_size = size; +#endif } return dp->di_size; } From webhook-mailer at python.org Fri May 1 09:13:56 2020 From: webhook-mailer at python.org (Batuhan Taskaya) Date: Fri, 01 May 2020 13:13:56 -0000 Subject: [Python-checkins] bpo-40334: Improve column offsets for thrown syntax errors by Pegen (GH-19782) Message-ID: https://github.com/python/cpython/commit/76c1b4d5c5a610c09943e1ee7ae18f1957804730 commit: 76c1b4d5c5a610c09943e1ee7ae18f1957804730 branch: master author: Batuhan Taskaya committer: GitHub date: 2020-05-01T14:13:43+01:00 summary: bpo-40334: Improve column offsets for thrown syntax errors by Pegen (GH-19782) files: M Grammar/python.gram M Lib/test/test_cmd_line_script.py M Lib/test/test_exceptions.py M Parser/pegen/parse.c M Parser/pegen/pegen.c M Parser/pegen/pegen.h diff 
--git a/Grammar/python.gram b/Grammar/python.gram index 38107fcf7354c..3813d8845be24 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -609,7 +609,7 @@ invalid_assignment: | expression ':' expression ['=' annotated_rhs] { RAISE_SYNTAX_ERROR("illegal target for annotation") } | a=expression ('=' | augassign) (yield_expr | star_expressions) { - RAISE_SYNTAX_ERROR("cannot assign to %s", _PyPegen_get_expr_name(a)) } + RAISE_SYNTAX_ERROR_NO_COL_OFFSET("cannot assign to %s", _PyPegen_get_expr_name(a)) } invalid_block: | NEWLINE !INDENT { RAISE_INDENTATION_ERROR("expected an indented block") } invalid_comprehension: diff --git a/Lib/test/test_cmd_line_script.py b/Lib/test/test_cmd_line_script.py index f0130e376aec4..1fc9500738f35 100644 --- a/Lib/test/test_cmd_line_script.py +++ b/Lib/test/test_cmd_line_script.py @@ -599,7 +599,7 @@ def test_syntaxerror_unindented_caret_position(self): exitcode, stdout, stderr = assert_python_failure(script_name) text = io.TextIOWrapper(io.BytesIO(stderr), 'ascii').read() # Confirm that the caret is located under the first 1 character - self.assertIn("\n 1 + 1 = 2\n ^", text) + self.assertIn("\n 1 + 1 = 2\n ^", text) def test_syntaxerror_indented_caret_position(self): script = textwrap.dedent("""\ @@ -611,7 +611,7 @@ def test_syntaxerror_indented_caret_position(self): exitcode, stdout, stderr = assert_python_failure(script_name) text = io.TextIOWrapper(io.BytesIO(stderr), 'ascii').read() # Confirm that the caret is located under the first 1 character - self.assertIn("\n 1 + 1 = 2\n ^", text) + self.assertIn("\n 1 + 1 = 2\n ^", text) # Try the same with a form feed at the start of the indented line script = ( @@ -622,7 +622,7 @@ def test_syntaxerror_indented_caret_position(self): exitcode, stdout, stderr = assert_python_failure(script_name) text = io.TextIOWrapper(io.BytesIO(stderr), "ascii").read() self.assertNotIn("\f", text) - self.assertIn("\n 1 + 1 = 2\n ^", text) + self.assertIn("\n 1 + 1 = 2\n ^", text) def 
test_syntaxerror_multi_line_fstring(self): script = 'foo = f"""{}\nfoo"""\n' diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py index a207fb48632f9..354b3f4843718 100644 --- a/Lib/test/test_exceptions.py +++ b/Lib/test/test_exceptions.py @@ -178,19 +178,19 @@ def ckmsg(src, msg, exception=SyntaxError): s = '''if True:\n print()\n\texec "mixed tabs and spaces"''' ckmsg(s, "inconsistent use of tabs and spaces in indentation", TabError) - @support.skip_if_new_parser("Pegen column offsets might be different") - def testSyntaxErrorOffset(self): - def check(src, lineno, offset, encoding='utf-8'): - with self.assertRaises(SyntaxError) as cm: - compile(src, '', 'exec') - self.assertEqual(cm.exception.lineno, lineno) - self.assertEqual(cm.exception.offset, offset) - if cm.exception.text is not None: - if not isinstance(src, str): - src = src.decode(encoding, 'replace') - line = src.split('\n')[lineno-1] - self.assertIn(line, cm.exception.text) + def check(self, src, lineno, offset, encoding='utf-8'): + with self.assertRaises(SyntaxError) as cm: + compile(src, '', 'exec') + self.assertEqual(cm.exception.lineno, lineno) + self.assertEqual(cm.exception.offset, offset) + if cm.exception.text is not None: + if not isinstance(src, str): + src = src.decode(encoding, 'replace') + line = src.split('\n')[lineno-1] + self.assertIn(line, cm.exception.text) + def testSyntaxErrorOffset(self): + check = self.check check('def fact(x):\n\treturn x!\n', 2, 10) check('1 +\n', 1, 4) check('def spam():\n print(1)\n print(2)', 3, 10) @@ -238,20 +238,20 @@ def baz(): check('nonlocal x', 1, 1) check('def f():\n global x\n nonlocal x', 2, 3) - # Errors thrown by ast.c - check('for 1 in []: pass', 1, 5) - check('def f(*):\n pass', 1, 7) - check('[*x for x in xs]', 1, 2) - check('def f():\n x, y: int', 2, 3) - check('(yield i) = 2', 1, 1) - check('foo(x for x in range(10), 100)', 1, 5) - check('foo(1=2)', 1, 5) - # Errors thrown by future.c check('from __future__ import 
doesnt_exist', 1, 1) check('from __future__ import braces', 1, 1) check('x=1\nfrom __future__ import division', 2, 1) + @support.skip_if_new_parser("Pegen column offsets might be different") + def testSyntaxErrorOffsetCustom(self): + self.check('for 1 in []: pass', 1, 5) + self.check('def f(*):\n pass', 1, 7) + self.check('[*x for x in xs]', 1, 2) + self.check('def f():\n x, y: int', 2, 3) + self.check('(yield i) = 2', 1, 1) + self.check('foo(x for x in range(10), 100)', 1, 5) + self.check('foo(1=2)', 1, 5) @cpython_only def testSettingException(self): diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index 2be5e384ae532..33c92c232c54a 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -10515,7 +10515,7 @@ invalid_assignment_rule(Parser *p) (_tmp_132_var = _tmp_132_rule(p)) ) { - res = RAISE_SYNTAX_ERROR ( "cannot assign to %s" , _PyPegen_get_expr_name ( a ) ); + res = RAISE_SYNTAX_ERROR_NO_COL_OFFSET ( "cannot assign to %s" , _PyPegen_get_expr_name ( a ) ); if (res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; diff --git a/Parser/pegen/pegen.c b/Parser/pegen/pegen.c index 40c09ffcc3a64..a7add8fbb144e 100644 --- a/Parser/pegen/pegen.c +++ b/Parser/pegen/pegen.c @@ -145,11 +145,15 @@ byte_offset_to_character_offset(PyObject *line, int col_offset) if (!str) { return 0; } - PyObject *text = PyUnicode_DecodeUTF8(str, col_offset, NULL); + PyObject *text = PyUnicode_DecodeUTF8(str, col_offset, "replace"); if (!text) { return 0; } Py_ssize_t size = PyUnicode_GET_LENGTH(text); + str = PyUnicode_AsUTF8(text); + if (str != NULL && (int)strlen(str) == col_offset) { + size = strlen(str); + } Py_DECREF(text); return size; } @@ -297,66 +301,21 @@ raise_tokenizer_init_error(PyObject *filename) } static inline PyObject * -get_error_line(char *buffer) -{ - char *newline = strchr(buffer, '\n'); - if (newline) { - return PyUnicode_FromStringAndSize(buffer, newline - buffer); - } - else { - return PyUnicode_FromString(buffer); - } -} - -static 
int -tokenizer_error_with_col_offset(Parser *p, PyObject *errtype, const char *errmsg) +get_error_line(char *buffer, int is_file) { - PyObject *errstr = NULL; - PyObject *value = NULL; - size_t col_number = -1; - - errstr = PyUnicode_FromString(errmsg); - if (!errstr) { - return -1; - } - - PyObject *loc = NULL; - if (p->start_rule == Py_file_input) { - loc = PyErr_ProgramTextObject(p->tok->filename, p->tok->lineno); - } - if (!loc) { - loc = get_error_line(p->tok->buf); + const char *newline; + if (is_file) { + newline = strrchr(buffer, '\n'); + } else { + newline = strchr(buffer, '\n'); } - if (loc) { - col_number = p->tok->cur - p->tok->buf; + if (newline) { + return PyUnicode_DecodeUTF8(buffer, newline - buffer, "replace"); } else { - Py_INCREF(Py_None); - loc = Py_None; + return PyUnicode_DecodeUTF8(buffer, strlen(buffer), "replace"); } - - PyObject *tmp = Py_BuildValue("(OiiN)", p->tok->filename, p->tok->lineno, - col_number, loc); - if (!tmp) { - goto error; - } - - value = PyTuple_Pack(2, errstr, tmp); - Py_DECREF(tmp); - if (!value) { - goto error; - } - PyErr_SetObject(errtype, value); - - Py_XDECREF(value); - Py_XDECREF(errstr); - return -1; - -error: - Py_XDECREF(errstr); - Py_XDECREF(loc); - return -1; } static int @@ -376,20 +335,20 @@ tokenizer_error(Parser *p) msg = "invalid character in identifier"; break; case E_BADPREFIX: - return tokenizer_error_with_col_offset(p, - errtype, "invalid string prefix"); + RAISE_SYNTAX_ERROR("invalid string prefix"); + return -1; case E_EOFS: - return tokenizer_error_with_col_offset(p, - errtype, "EOF while scanning triple-quoted string literal"); + RAISE_SYNTAX_ERROR("EOF while scanning triple-quoted string literal"); + return -1; case E_EOLS: - return tokenizer_error_with_col_offset(p, - errtype, "EOL while scanning string literal"); + RAISE_SYNTAX_ERROR("EOL while scanning string literal"); + return -1; case E_EOF: - return tokenizer_error_with_col_offset(p, - errtype, "unexpected EOF while parsing"); + 
RAISE_SYNTAX_ERROR("unexpected EOF while parsing"); + return -1; case E_DEDENT: - return tokenizer_error_with_col_offset(p, - PyExc_IndentationError, "unindent does not match any outer indentation level"); + RAISE_INDENTATION_ERROR("unindent does not match any outer indentation level"); + return -1; case E_INTR: if (!PyErr_Occurred()) { PyErr_SetNone(PyExc_KeyboardInterrupt); @@ -421,14 +380,14 @@ tokenizer_error(Parser *p) } void * -_PyPegen_raise_error(Parser *p, PyObject *errtype, const char *errmsg, ...) +_PyPegen_raise_error(Parser *p, PyObject *errtype, int with_col_number, const char *errmsg, ...) { PyObject *value = NULL; PyObject *errstr = NULL; PyObject *loc = NULL; PyObject *tmp = NULL; Token *t = p->tokens[p->fill - 1]; - Py_ssize_t col_number = 0; + Py_ssize_t col_number = !with_col_number; va_list va; va_start(va, errmsg); @@ -443,14 +402,20 @@ _PyPegen_raise_error(Parser *p, PyObject *errtype, const char *errmsg, ...) } if (!loc) { - loc = get_error_line(p->tok->buf); + loc = get_error_line(p->tok->buf, p->start_rule == Py_file_input); } - if (loc) { - int col_offset = t->col_offset == -1 ? 
0 : t->col_offset; - col_number = byte_offset_to_character_offset(loc, col_offset) + 1; + if (loc && with_col_number) { + int col_offset; + if (t->col_offset == -1) { + col_offset = Py_SAFE_DOWNCAST(p->tok->cur - p->tok->buf, + intptr_t, int); + } else { + col_offset = t->col_offset + 1; + } + col_number = byte_offset_to_character_offset(loc, col_offset); } - else { + else if (!loc) { Py_INCREF(Py_None); loc = Py_None; } @@ -632,14 +597,6 @@ _PyPegen_fill_token(Parser *p) type = PyTokenizer_Get(p->tok, &start, &end); } - if (type == ERRORTOKEN) { - if (p->tok->done == E_DECODE) { - return raise_decode_error(p); - } - else { - return tokenizer_error(p); - } - } if (type == ENDMARKER && p->start_rule == Py_single_input && p->parsing_started) { type = NEWLINE; /* Add an extra newline */ p->parsing_started = 0; @@ -700,6 +657,16 @@ _PyPegen_fill_token(Parser *p) t->end_col_offset = p->tok->lineno == 1 ? p->starting_col_offset + end_col_offset : end_col_offset; p->fill += 1; + + if (type == ERRORTOKEN) { + if (p->tok->done == E_DECODE) { + return raise_decode_error(p); + } + else { + return tokenizer_error(p); + } + } + return 0; } diff --git a/Parser/pegen/pegen.h b/Parser/pegen/pegen.h index 1620f92609472..cbe6f197ac742 100644 --- a/Parser/pegen/pegen.h +++ b/Parser/pegen/pegen.h @@ -126,14 +126,15 @@ expr_ty _PyPegen_name_token(Parser *p); expr_ty _PyPegen_number_token(Parser *p); void *_PyPegen_string_token(Parser *p); const char *_PyPegen_get_expr_name(expr_ty); -void *_PyPegen_raise_error(Parser *p, PyObject *, const char *errmsg, ...); +void *_PyPegen_raise_error(Parser *p, PyObject *errtype, int with_col_number, const char *errmsg, ...); void *_PyPegen_dummy_name(Parser *p, ...); #define UNUSED(expr) do { (void)(expr); } while (0) #define EXTRA_EXPR(head, tail) head->lineno, head->col_offset, tail->end_lineno, tail->end_col_offset, p->arena #define EXTRA start_lineno, start_col_offset, end_lineno, end_col_offset, p->arena -#define RAISE_SYNTAX_ERROR(msg, ...) 
_PyPegen_raise_error(p, PyExc_SyntaxError, msg, ##__VA_ARGS__) -#define RAISE_INDENTATION_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_IndentationError, msg, ##__VA_ARGS__) +#define RAISE_SYNTAX_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_SyntaxError, 1, msg, ##__VA_ARGS__) +#define RAISE_INDENTATION_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_IndentationError, 1, msg, ##__VA_ARGS__) +#define RAISE_SYNTAX_ERROR_NO_COL_OFFSET(msg, ...) _PyPegen_raise_error(p, PyExc_SyntaxError, 0, msg, ##__VA_ARGS__) Py_LOCAL_INLINE(void *) CHECK_CALL(Parser *p, void *result) @@ -190,8 +191,8 @@ INVALID_VERSION_CHECK(Parser *p, int version, char *msg, void *node) } if (p->feature_version < version) { p->error_indicator = 1; - return _PyPegen_raise_error(p, PyExc_SyntaxError, "%s only supported in Python 3.%i and greater", - msg, version); + return RAISE_SYNTAX_ERROR("%s only supported in Python 3.%i and greater", + msg, version); } return node; } From webhook-mailer at python.org Fri May 1 10:18:31 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Fri, 01 May 2020 14:18:31 -0000 Subject: [Python-checkins] [3.8] bpo-39562: Prevent collision of future and compiler flags (GH-19230) (GH-19835) Message-ID: https://github.com/python/cpython/commit/5055c274c6e4f2bb8025910dedf0ff89f4bdd170 commit: 5055c274c6e4f2bb8025910dedf0ff89f4bdd170 branch: 3.8 author: Pablo Galindo committer: GitHub date: 2020-05-01T07:18:27-07:00 summary: [3.8] bpo-39562: Prevent collision of future and compiler flags (GH-19230) (GH-19835) The constant values of future flags in the __future__ module is updated in order to prevent collision with compiler flags. Previously PyCF_ALLOW_TOP_LEVEL_AWAIT was clashing with CO_FUTURE_DIVISION.. 
(cherry picked from commit 4454057269b995341b04d13f0bf97f96080f27d0) Co-authored-by: Batuhan Taşkaya files: A Misc/NEWS.d/next/Core and Builtins/2020-05-01-14-58-16.bpo-39562.KCsX8n.rst M Doc/whatsnew/3.8.rst M Include/code.h M Include/compile.h M Lib/__future__.py M Lib/test/test_future.py M Python/bltinmodule.c diff --git a/Doc/whatsnew/3.8.rst b/Doc/whatsnew/3.8.rst index ad7d5d4c670b3..dca7f48979e16 100644 --- a/Doc/whatsnew/3.8.rst +++ b/Doc/whatsnew/3.8.rst @@ -2234,3 +2234,8 @@ Notable changes in Python 3.8.2 Fixed a regression with the ``ignore`` callback of :func:`shutil.copytree`. The argument types are now str and List[str] again. (Contributed by Manuel Barkhau and Giampaolo Rodola in :issue:`39390`.) + +The constant values of future flags in the :mod:`__future__` module +are updated in order to prevent collision with compiler flags. Previously +``PyCF_ALLOW_TOP_LEVEL_AWAIT`` was clashing with ``CO_FUTURE_DIVISION``. +(Contributed by Batuhan Taskaya in :issue:`39562`) diff --git a/Include/code.h b/Include/code.h index 3afddd20c80d7..a1cd58f44a0e8 100644 --- a/Include/code.h +++ b/Include/code.h @@ -88,19 +88,19 @@ typedef struct { #define CO_ITERABLE_COROUTINE 0x0100 #define CO_ASYNC_GENERATOR 0x0200 -/* These are no longer used. */ -#if 0 -#define CO_GENERATOR_ALLOWED 0x1000 -#endif -#define CO_FUTURE_DIVISION 0x2000 -#define CO_FUTURE_ABSOLUTE_IMPORT 0x4000 /* do absolute imports by default */ -#define CO_FUTURE_WITH_STATEMENT 0x8000 -#define CO_FUTURE_PRINT_FUNCTION 0x10000 -#define CO_FUTURE_UNICODE_LITERALS 0x20000 - -#define CO_FUTURE_BARRY_AS_BDFL 0x40000 -#define CO_FUTURE_GENERATOR_STOP 0x80000 -#define CO_FUTURE_ANNOTATIONS 0x100000 +/* bpo-39562: These constant values are changed in Python 3.9 + to prevent collision with compiler flags. CO_FUTURE_ and PyCF_ + constants must be kept unique. PyCF_ constants can use bits from + 0x0100 to 0x10000. CO_FUTURE_ constants use bits starting at 0x20000. 
*/ +#define CO_FUTURE_DIVISION 0x20000 +#define CO_FUTURE_ABSOLUTE_IMPORT 0x40000 /* do absolute imports by default */ +#define CO_FUTURE_WITH_STATEMENT 0x80000 +#define CO_FUTURE_PRINT_FUNCTION 0x100000 +#define CO_FUTURE_UNICODE_LITERALS 0x200000 + +#define CO_FUTURE_BARRY_AS_BDFL 0x400000 +#define CO_FUTURE_GENERATOR_STOP 0x800000 +#define CO_FUTURE_ANNOTATIONS 0x1000000 /* This value is found in the co_cell2arg array when the associated cell variable does not correspond to an argument. */ diff --git a/Include/compile.h b/Include/compile.h index 1cda955c14255..015584d03b023 100644 --- a/Include/compile.h +++ b/Include/compile.h @@ -18,12 +18,18 @@ PyAPI_FUNC(PyCodeObject *) PyNode_Compile(struct _node *, const char *); CO_FUTURE_UNICODE_LITERALS | CO_FUTURE_BARRY_AS_BDFL | \ CO_FUTURE_GENERATOR_STOP | CO_FUTURE_ANNOTATIONS) #define PyCF_MASK_OBSOLETE (CO_NESTED) + +/* bpo-39562: CO_FUTURE_ and PyCF_ constants must be kept unique. + PyCF_ constants can use bits from 0x0100 to 0x10000. + CO_FUTURE_ constants use bits starting at 0x20000. */ #define PyCF_SOURCE_IS_UTF8 0x0100 #define PyCF_DONT_IMPLY_DEDENT 0x0200 #define PyCF_ONLY_AST 0x0400 #define PyCF_IGNORE_COOKIE 0x0800 #define PyCF_TYPE_COMMENTS 0x1000 #define PyCF_ALLOW_TOP_LEVEL_AWAIT 0x2000 +#define PyCF_COMPILE_MASK (PyCF_ONLY_AST | PyCF_ALLOW_TOP_LEVEL_AWAIT | \ + PyCF_TYPE_COMMENTS | PyCF_DONT_IMPLY_DEDENT) #ifndef Py_LIMITED_API typedef struct { diff --git a/Lib/__future__.py b/Lib/__future__.py index e1135685d846c..d7cb8ac5f4974 100644 --- a/Lib/__future__.py +++ b/Lib/__future__.py @@ -68,14 +68,14 @@ # this module. 
CO_NESTED = 0x0010 # nested_scopes CO_GENERATOR_ALLOWED = 0 # generators (obsolete, was 0x1000) -CO_FUTURE_DIVISION = 0x2000 # division -CO_FUTURE_ABSOLUTE_IMPORT = 0x4000 # perform absolute imports by default -CO_FUTURE_WITH_STATEMENT = 0x8000 # with statement -CO_FUTURE_PRINT_FUNCTION = 0x10000 # print function -CO_FUTURE_UNICODE_LITERALS = 0x20000 # unicode string literals -CO_FUTURE_BARRY_AS_BDFL = 0x40000 -CO_FUTURE_GENERATOR_STOP = 0x80000 # StopIteration becomes RuntimeError in generators -CO_FUTURE_ANNOTATIONS = 0x100000 # annotations become strings at runtime +CO_FUTURE_DIVISION = 0x20000 # division +CO_FUTURE_ABSOLUTE_IMPORT = 0x40000 # perform absolute imports by default +CO_FUTURE_WITH_STATEMENT = 0x80000 # with statement +CO_FUTURE_PRINT_FUNCTION = 0x100000 # print function +CO_FUTURE_UNICODE_LITERALS = 0x200000 # unicode string literals +CO_FUTURE_BARRY_AS_BDFL = 0x400000 +CO_FUTURE_GENERATOR_STOP = 0x800000 # StopIteration becomes RuntimeError in generators +CO_FUTURE_ANNOTATIONS = 0x1000000 # annotations become strings at runtime class _Feature: def __init__(self, optionalRelease, mandatoryRelease, compiler_flag): diff --git a/Lib/test/test_future.py b/Lib/test/test_future.py index d83c47ef15591..ea13533b5143d 100644 --- a/Lib/test/test_future.py +++ b/Lib/test/test_future.py @@ -1,5 +1,7 @@ # Test various flavors of legal and illegal future statements +import __future__ +import ast import unittest from test import support from textwrap import dedent @@ -74,6 +76,21 @@ def test_badfuture10(self): from test import badsyntax_future10 self.check_syntax_error(cm.exception, "badsyntax_future10", 3) + def test_ensure_flags_dont_clash(self): + # bpo-39562: test that future flags and compiler flags doesn't clash + + # obtain future flags (CO_FUTURE_***) from the __future__ module + flags = { + f"CO_FUTURE_{future.upper()}": getattr(__future__, future).compiler_flag + for future in __future__.all_feature_names + } + # obtain some of the exported compiler 
flags (PyCF_***) from the ast module + flags.update({ + flag: getattr(ast, flag) + for flag in dir(ast) if flag.startswith("PyCF_") + }) + self.assertCountEqual(set(flags.values()), flags.values()) + def test_parserhack(self): # test that the parser.c::future_hack function works as expected # Note: although this test must pass, it's not testing the original diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-01-14-58-16.bpo-39562.KCsX8n.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-01-14-58-16.bpo-39562.KCsX8n.rst new file mode 100644 index 0000000000000..5d7ef9606b449 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-01-14-58-16.bpo-39562.KCsX8n.rst @@ -0,0 +1,3 @@ +The constant values of future flags in the :mod:`__future__` module are +updated in order to prevent collision with compiler flags. Previously +``PyCF_ALLOW_TOP_LEVEL_AWAIT`` was clashing with ``CO_FUTURE_DIVISION``. diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c index 33f969094e7d1..fe22bbdde4e91 100644 --- a/Python/bltinmodule.c +++ b/Python/bltinmodule.c @@ -738,7 +738,7 @@ builtin_compile_impl(PyObject *module, PyObject *source, PyObject *filename, } if (flags & - ~(PyCF_MASK | PyCF_MASK_OBSOLETE | PyCF_DONT_IMPLY_DEDENT | PyCF_ONLY_AST | PyCF_TYPE_COMMENTS)) + ~(PyCF_MASK | PyCF_MASK_OBSOLETE | PyCF_COMPILE_MASK)) { PyErr_SetString(PyExc_ValueError, "compile(): unrecognised flags"); From webhook-mailer at python.org Fri May 1 11:02:14 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Fri, 01 May 2020 15:02:14 -0000 Subject: [Python-checkins] bpo-40334: unskip test_function_type in test_unparse with the new parser (GH-19837) Message-ID: https://github.com/python/cpython/commit/ea7297cf8f1aad4df8921a3d81a75118511afe77 commit: ea7297cf8f1aad4df8921a3d81a75118511afe77 branch: master author: Pablo Galindo committer: GitHub date: 2020-05-01T08:02:06-07:00 summary: bpo-40334: unskip test_function_type in test_unparse with the new parser (GH-19837) files: M 
Lib/test/test_unparse.py diff --git a/Lib/test/test_unparse.py b/Lib/test/test_unparse.py index b913569585a21..d4089a3fc1cdf 100644 --- a/Lib/test/test_unparse.py +++ b/Lib/test/test_unparse.py @@ -327,7 +327,6 @@ def test_constant_tuples(self): ast.Constant(value=(1, 2, 3), kind=None), "(1, 2, 3)" ) - @test.support.skip_if_new_parser("Pegen does not support type annotation yet") def test_function_type(self): for function_type in ( "() -> int", From webhook-mailer at python.org Fri May 1 11:32:17 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Fri, 01 May 2020 15:32:17 -0000 Subject: [Python-checkins] bpo-40334: Correct return value of func_type_comment (GH-19833) Message-ID: https://github.com/python/cpython/commit/d955241469c18c946924dba79c18a9ef200391ad commit: d955241469c18c946924dba79c18a9ef200391ad branch: master author: Pablo Galindo committer: GitHub date: 2020-05-01T08:32:09-07:00 summary: bpo-40334: Correct return value of func_type_comment (GH-19833) files: M Grammar/python.gram M Parser/pegen/parse.c diff --git a/Grammar/python.gram b/Grammar/python.gram index 3813d8845be24..0acd851e09ff6 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -210,7 +210,7 @@ function_def_raw[stmt_ty]: (params) ? 
params : CHECK(_PyPegen_empty_arguments(p)), b, NULL, a, NEW_TYPE_COMMENT(p, tc), EXTRA) ) } -func_type_comment[PyObject*]: +func_type_comment[Token*]: | NEWLINE t=TYPE_COMMENT &(NEWLINE INDENT) { t } # Must be followed by indented block | invalid_double_type_comments | TYPE_COMMENT diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index 33c92c232c54a..f4dacbffba493 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -408,7 +408,7 @@ static stmt_ty return_stmt_rule(Parser *p); static stmt_ty raise_stmt_rule(Parser *p); static stmt_ty function_def_rule(Parser *p); static stmt_ty function_def_raw_rule(Parser *p); -static PyObject* func_type_comment_rule(Parser *p); +static Token* func_type_comment_rule(Parser *p); static arguments_ty params_rule(Parser *p); static arguments_ty parameters_rule(Parser *p); static asdl_seq* slash_no_default_rule(Parser *p); @@ -3679,13 +3679,13 @@ function_def_raw_rule(Parser *p) // | NEWLINE TYPE_COMMENT &(NEWLINE INDENT) // | invalid_double_type_comments // | TYPE_COMMENT -static PyObject* +static Token* func_type_comment_rule(Parser *p) { if (p->error_indicator) { return NULL; } - PyObject* res = NULL; + Token* res = NULL; int mark = p->mark; { // NEWLINE TYPE_COMMENT &(NEWLINE INDENT) Token * newline_var; From webhook-mailer at python.org Fri May 1 11:37:00 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Fri, 01 May 2020 15:37:00 -0000 Subject: [Python-checkins] bpo-39562: Correctly updated the version section in the what's new document (GH-19838) Message-ID: https://github.com/python/cpython/commit/71e6122b4428ae43e868e34db4f072635f58a555 commit: 71e6122b4428ae43e868e34db4f072635f58a555 branch: 3.8 author: Pablo Galindo committer: GitHub date: 2020-05-01T16:36:51+01:00 summary: bpo-39562: Correctly updated the version section in the what's new document (GH-19838) files: M Doc/whatsnew/3.8.rst diff --git a/Doc/whatsnew/3.8.rst b/Doc/whatsnew/3.8.rst index dca7f48979e16..d2db5bff5afcf 100644 --- 
a/Doc/whatsnew/3.8.rst +++ b/Doc/whatsnew/3.8.rst @@ -2235,6 +2235,9 @@ Fixed a regression with the ``ignore`` callback of :func:`shutil.copytree`. The argument types are now str and List[str] again. (Contributed by Manuel Barkhau and Giampaolo Rodola in :issue:`39390`.) +Notable changes in Python 3.8.3 +=============================== + The constant values of future flags in the :mod:`__future__` module are updated in order to prevent collision with compiler flags. Previously ``PyCF_ALLOW_TOP_LEVEL_AWAIT`` was clashing with ``CO_FUTURE_DIVISION``. From webhook-mailer at python.org Fri May 1 12:42:11 2020 From: webhook-mailer at python.org (Guido van Rossum) Date: Fri, 01 May 2020 16:42:11 -0000 Subject: [Python-checkins] bpo-40334: Refactor lambda_parameters similar to parameters (GH-19830) Message-ID: https://github.com/python/cpython/commit/3941d9700b2a272689cb8a8435b5c60a1466ef79 commit: 3941d9700b2a272689cb8a8435b5c60a1466ef79 branch: master author: Guido van Rossum committer: GitHub date: 2020-05-01T17:42:03+01:00 summary: bpo-40334: Refactor lambda_parameters similar to parameters (GH-19830) files: M Grammar/python.gram M Parser/pegen/parse.c diff --git a/Grammar/python.gram b/Grammar/python.gram index 0acd851e09ff6..cbd4bc010dc1e 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -246,8 +246,7 @@ star_etc[StarEtc*]: _PyPegen_star_etc(p, NULL, b, c) } | a=kwds { _PyPegen_star_etc(p, NULL, NULL, a) } -kwds[arg_ty]: - | '**' a=param_no_default { a } +kwds[arg_ty]: '**' a=param_no_default { a } # One parameter. This *includes* a following comma and type comment. # @@ -325,32 +324,47 @@ expression[expr_ty] (memo): lambdef[expr_ty]: | 'lambda' a=[lambda_parameters] ':' b=expression { _Py_Lambda((a) ? a : CHECK(_PyPegen_empty_arguments(p)), b, EXTRA) } + +# lambda_parameters etc. duplicates parameters but without annotations +# or type comments, and if there's no comma after a parameter, we expect +# a colon, not a close parenthesis. 
(For more, see parameters above.) +# lambda_parameters[arguments_ty]: - | a=lambda_slash_without_default b=[',' x=lambda_plain_names { x }] c=[',' y=lambda_names_with_default { y }] d=[',' z=[lambda_star_etc] { z }] { + | a=lambda_slash_no_default b=lambda_param_no_default* c=lambda_param_with_default* d=[lambda_star_etc] { _PyPegen_make_arguments(p, a, NULL, b, c, d) } - | a=lambda_slash_with_default b=[',' y=lambda_names_with_default { y }] c=[',' z=[lambda_star_etc] { z }] { + | a=lambda_slash_with_default b=lambda_param_with_default* c=[lambda_star_etc] { _PyPegen_make_arguments(p, NULL, a, NULL, b, c) } - | a=lambda_plain_names b=[',' y=lambda_names_with_default { y }] c=[',' z=[lambda_star_etc] { z }] { + | a=lambda_param_no_default+ b=lambda_param_with_default* c=[lambda_star_etc] { _PyPegen_make_arguments(p, NULL, NULL, a, b, c) } - | a=lambda_names_with_default b=[',' z=[lambda_star_etc] { z }] { _PyPegen_make_arguments(p, NULL, NULL, NULL, a, b)} + | a=lambda_param_with_default+ b=[lambda_star_etc] { _PyPegen_make_arguments(p, NULL, NULL, NULL, a, b)} | a=lambda_star_etc { _PyPegen_make_arguments(p, NULL, NULL, NULL, NULL, a) } -lambda_slash_without_default[asdl_seq*]: a=lambda_plain_names ',' '/' { a } -lambda_slash_with_default[SlashWithDefault*]: a=[n=lambda_plain_names ',' { n }] b=lambda_names_with_default ',' '/' { - _PyPegen_slash_with_default(p, a, b) } + +lambda_slash_no_default[asdl_seq*]: + | a=lambda_param_no_default+ '/' ',' { a } + | a=lambda_param_no_default+ '/' &':' { a } +lambda_slash_with_default[SlashWithDefault*]: + | a=lambda_param_no_default* b=lambda_param_with_default+ '/' ',' { _PyPegen_slash_with_default(p, a, b) } + | a=lambda_param_no_default* b=lambda_param_with_default+ '/' &':' { _PyPegen_slash_with_default(p, a, b) } + lambda_star_etc[StarEtc*]: - | '*' a=lambda_plain_name b=lambda_name_with_optional_default* c=[',' d=lambda_kwds { d }] [','] { + | '*' a=lambda_param_no_default b=lambda_param_maybe_default* c=[lambda_kwds] 
{ _PyPegen_star_etc(p, a, b, c) } - | '*' b=lambda_name_with_optional_default+ c=[',' d=lambda_kwds { d }] [','] { + | '*' ',' b=lambda_param_maybe_default+ c=[lambda_kwds] { _PyPegen_star_etc(p, NULL, b, c) } - | a=lambda_kwds [','] { _PyPegen_star_etc(p, NULL, NULL, a) } -lambda_name_with_optional_default[NameDefaultPair*]: - | ',' a=lambda_plain_name b=['=' e=expression { e }] { _PyPegen_name_default_pair(p, a, b, NULL) } -lambda_names_with_default[asdl_seq*]: a=','.lambda_name_with_default+ { a } -lambda_name_with_default[NameDefaultPair*]: - | n=lambda_plain_name '=' e=expression { _PyPegen_name_default_pair(p, n, e, NULL) } -lambda_plain_names[asdl_seq*]: a=','.(lambda_plain_name !'=')+ { a } -lambda_plain_name[arg_ty]: a=NAME { _Py_arg(a->v.Name.id, NULL, NULL, EXTRA) } -lambda_kwds[arg_ty]: '**' a=lambda_plain_name { a } + | a=lambda_kwds { _PyPegen_star_etc(p, NULL, NULL, a) } + +lambda_kwds[arg_ty]: '**' a=lambda_param_no_default { a } + +lambda_param_no_default[arg_ty]: + | a=lambda_param ',' { a } + | a=lambda_param &':' { a } +lambda_param_with_default[NameDefaultPair*]: + | a=lambda_param c=default ',' { _PyPegen_name_default_pair(p, a, c, NULL) } + | a=lambda_param c=default &':' { _PyPegen_name_default_pair(p, a, c, NULL) } +lambda_param_maybe_default[NameDefaultPair*]: + | a=lambda_param c=default? ',' { _PyPegen_name_default_pair(p, a, c, NULL) } + | a=lambda_param c=default? 
&':' { _PyPegen_name_default_pair(p, a, c, NULL) } +lambda_param[arg_ty]: a=NAME { _Py_arg(a->v.Name.id, NULL, NULL, EXTRA) } disjunction[expr_ty] (memo): | a=conjunction b=('or' c=conjunction { c })+ { _Py_BoolOp( diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index f4dacbffba493..b4745ba4d4f26 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -138,233 +138,227 @@ static KeywordToken *reserved_keywords[] = { #define expression_type 1067 #define lambdef_type 1068 #define lambda_parameters_type 1069 -#define lambda_slash_without_default_type 1070 +#define lambda_slash_no_default_type 1070 #define lambda_slash_with_default_type 1071 #define lambda_star_etc_type 1072 -#define lambda_name_with_optional_default_type 1073 -#define lambda_names_with_default_type 1074 -#define lambda_name_with_default_type 1075 -#define lambda_plain_names_type 1076 -#define lambda_plain_name_type 1077 -#define lambda_kwds_type 1078 -#define disjunction_type 1079 -#define conjunction_type 1080 -#define inversion_type 1081 -#define comparison_type 1082 -#define compare_op_bitwise_or_pair_type 1083 -#define eq_bitwise_or_type 1084 -#define noteq_bitwise_or_type 1085 -#define lte_bitwise_or_type 1086 -#define lt_bitwise_or_type 1087 -#define gte_bitwise_or_type 1088 -#define gt_bitwise_or_type 1089 -#define notin_bitwise_or_type 1090 -#define in_bitwise_or_type 1091 -#define isnot_bitwise_or_type 1092 -#define is_bitwise_or_type 1093 -#define bitwise_or_type 1094 // Left-recursive -#define bitwise_xor_type 1095 // Left-recursive -#define bitwise_and_type 1096 // Left-recursive -#define shift_expr_type 1097 // Left-recursive -#define sum_type 1098 // Left-recursive -#define term_type 1099 // Left-recursive -#define factor_type 1100 -#define power_type 1101 -#define await_primary_type 1102 -#define primary_type 1103 // Left-recursive -#define slices_type 1104 -#define slice_type 1105 -#define atom_type 1106 -#define strings_type 1107 -#define list_type 1108 -#define 
listcomp_type 1109 -#define tuple_type 1110 -#define group_type 1111 -#define genexp_type 1112 -#define set_type 1113 -#define setcomp_type 1114 -#define dict_type 1115 -#define dictcomp_type 1116 -#define kvpairs_type 1117 -#define kvpair_type 1118 -#define for_if_clauses_type 1119 -#define for_if_clause_type 1120 -#define yield_expr_type 1121 -#define arguments_type 1122 -#define args_type 1123 -#define kwargs_type 1124 -#define starred_expression_type 1125 -#define kwarg_or_starred_type 1126 -#define kwarg_or_double_starred_type 1127 -#define star_targets_type 1128 -#define star_targets_seq_type 1129 -#define star_target_type 1130 -#define star_atom_type 1131 -#define inside_paren_ann_assign_target_type 1132 -#define ann_assign_subscript_attribute_target_type 1133 -#define del_targets_type 1134 -#define del_target_type 1135 -#define del_t_atom_type 1136 -#define targets_type 1137 -#define target_type 1138 -#define t_primary_type 1139 // Left-recursive -#define t_lookahead_type 1140 -#define t_atom_type 1141 -#define incorrect_arguments_type 1142 -#define invalid_named_expression_type 1143 -#define invalid_assignment_type 1144 -#define invalid_block_type 1145 -#define invalid_comprehension_type 1146 -#define invalid_parameters_type 1147 -#define invalid_double_type_comments_type 1148 -#define _loop0_1_type 1149 -#define _loop0_2_type 1150 -#define _loop0_4_type 1151 -#define _gather_3_type 1152 -#define _loop0_6_type 1153 -#define _gather_5_type 1154 -#define _loop0_8_type 1155 -#define _gather_7_type 1156 -#define _loop0_10_type 1157 -#define _gather_9_type 1158 -#define _loop1_11_type 1159 -#define _loop0_13_type 1160 -#define _gather_12_type 1161 -#define _tmp_14_type 1162 -#define _tmp_15_type 1163 -#define _tmp_16_type 1164 -#define _tmp_17_type 1165 -#define _tmp_18_type 1166 -#define _tmp_19_type 1167 -#define _tmp_20_type 1168 -#define _tmp_21_type 1169 -#define _loop1_22_type 1170 -#define _tmp_23_type 1171 -#define _tmp_24_type 1172 -#define 
_loop0_26_type 1173 -#define _gather_25_type 1174 -#define _loop0_28_type 1175 -#define _gather_27_type 1176 -#define _tmp_29_type 1177 -#define _loop0_30_type 1178 -#define _loop1_31_type 1179 -#define _loop0_33_type 1180 -#define _gather_32_type 1181 -#define _tmp_34_type 1182 -#define _loop0_36_type 1183 -#define _gather_35_type 1184 -#define _tmp_37_type 1185 -#define _loop0_39_type 1186 -#define _gather_38_type 1187 -#define _loop0_41_type 1188 -#define _gather_40_type 1189 -#define _loop0_43_type 1190 -#define _gather_42_type 1191 -#define _loop0_45_type 1192 -#define _gather_44_type 1193 -#define _tmp_46_type 1194 -#define _loop1_47_type 1195 -#define _tmp_48_type 1196 -#define _tmp_49_type 1197 -#define _tmp_50_type 1198 -#define _tmp_51_type 1199 -#define _tmp_52_type 1200 -#define _loop0_53_type 1201 -#define _loop0_54_type 1202 -#define _loop0_55_type 1203 -#define _loop1_56_type 1204 -#define _loop0_57_type 1205 -#define _loop1_58_type 1206 -#define _loop1_59_type 1207 -#define _loop1_60_type 1208 -#define _loop0_61_type 1209 -#define _loop1_62_type 1210 -#define _loop0_63_type 1211 -#define _loop1_64_type 1212 -#define _loop0_65_type 1213 -#define _loop1_66_type 1214 -#define _loop1_67_type 1215 -#define _tmp_68_type 1216 -#define _loop0_70_type 1217 -#define _gather_69_type 1218 -#define _loop1_71_type 1219 -#define _loop0_73_type 1220 -#define _gather_72_type 1221 -#define _loop1_74_type 1222 -#define _tmp_75_type 1223 -#define _tmp_76_type 1224 -#define _tmp_77_type 1225 -#define _tmp_78_type 1226 -#define _tmp_79_type 1227 -#define _tmp_80_type 1228 -#define _tmp_81_type 1229 -#define _tmp_82_type 1230 -#define _tmp_83_type 1231 -#define _loop0_84_type 1232 -#define _tmp_85_type 1233 -#define _loop1_86_type 1234 -#define _tmp_87_type 1235 -#define _tmp_88_type 1236 -#define _loop0_90_type 1237 -#define _gather_89_type 1238 -#define _loop0_92_type 1239 -#define _gather_91_type 1240 -#define _loop1_93_type 1241 -#define _loop1_94_type 1242 -#define 
_loop1_95_type 1243 -#define _tmp_96_type 1244 -#define _loop0_98_type 1245 -#define _gather_97_type 1246 -#define _tmp_99_type 1247 -#define _tmp_100_type 1248 -#define _tmp_101_type 1249 -#define _tmp_102_type 1250 -#define _loop1_103_type 1251 -#define _tmp_104_type 1252 -#define _tmp_105_type 1253 -#define _loop0_107_type 1254 -#define _gather_106_type 1255 -#define _loop1_108_type 1256 -#define _loop0_109_type 1257 -#define _loop0_110_type 1258 -#define _tmp_111_type 1259 -#define _tmp_112_type 1260 -#define _loop0_114_type 1261 -#define _gather_113_type 1262 -#define _loop0_116_type 1263 -#define _gather_115_type 1264 -#define _loop0_118_type 1265 -#define _gather_117_type 1266 -#define _loop0_120_type 1267 -#define _gather_119_type 1268 -#define _loop0_121_type 1269 -#define _loop0_123_type 1270 -#define _gather_122_type 1271 -#define _tmp_124_type 1272 -#define _loop0_126_type 1273 -#define _gather_125_type 1274 -#define _loop0_128_type 1275 -#define _gather_127_type 1276 -#define _tmp_129_type 1277 -#define _tmp_130_type 1278 -#define _tmp_131_type 1279 -#define _tmp_132_type 1280 -#define _tmp_133_type 1281 -#define _loop0_134_type 1282 -#define _tmp_135_type 1283 -#define _tmp_136_type 1284 -#define _tmp_137_type 1285 -#define _tmp_138_type 1286 -#define _tmp_139_type 1287 -#define _tmp_140_type 1288 -#define _tmp_141_type 1289 -#define _tmp_142_type 1290 -#define _tmp_143_type 1291 -#define _tmp_144_type 1292 -#define _tmp_145_type 1293 -#define _tmp_146_type 1294 -#define _tmp_147_type 1295 -#define _loop1_148_type 1296 +#define lambda_kwds_type 1073 +#define lambda_param_no_default_type 1074 +#define lambda_param_with_default_type 1075 +#define lambda_param_maybe_default_type 1076 +#define lambda_param_type 1077 +#define disjunction_type 1078 +#define conjunction_type 1079 +#define inversion_type 1080 +#define comparison_type 1081 +#define compare_op_bitwise_or_pair_type 1082 +#define eq_bitwise_or_type 1083 +#define noteq_bitwise_or_type 1084 
+#define lte_bitwise_or_type 1085 +#define lt_bitwise_or_type 1086 +#define gte_bitwise_or_type 1087 +#define gt_bitwise_or_type 1088 +#define notin_bitwise_or_type 1089 +#define in_bitwise_or_type 1090 +#define isnot_bitwise_or_type 1091 +#define is_bitwise_or_type 1092 +#define bitwise_or_type 1093 // Left-recursive +#define bitwise_xor_type 1094 // Left-recursive +#define bitwise_and_type 1095 // Left-recursive +#define shift_expr_type 1096 // Left-recursive +#define sum_type 1097 // Left-recursive +#define term_type 1098 // Left-recursive +#define factor_type 1099 +#define power_type 1100 +#define await_primary_type 1101 +#define primary_type 1102 // Left-recursive +#define slices_type 1103 +#define slice_type 1104 +#define atom_type 1105 +#define strings_type 1106 +#define list_type 1107 +#define listcomp_type 1108 +#define tuple_type 1109 +#define group_type 1110 +#define genexp_type 1111 +#define set_type 1112 +#define setcomp_type 1113 +#define dict_type 1114 +#define dictcomp_type 1115 +#define kvpairs_type 1116 +#define kvpair_type 1117 +#define for_if_clauses_type 1118 +#define for_if_clause_type 1119 +#define yield_expr_type 1120 +#define arguments_type 1121 +#define args_type 1122 +#define kwargs_type 1123 +#define starred_expression_type 1124 +#define kwarg_or_starred_type 1125 +#define kwarg_or_double_starred_type 1126 +#define star_targets_type 1127 +#define star_targets_seq_type 1128 +#define star_target_type 1129 +#define star_atom_type 1130 +#define inside_paren_ann_assign_target_type 1131 +#define ann_assign_subscript_attribute_target_type 1132 +#define del_targets_type 1133 +#define del_target_type 1134 +#define del_t_atom_type 1135 +#define targets_type 1136 +#define target_type 1137 +#define t_primary_type 1138 // Left-recursive +#define t_lookahead_type 1139 +#define t_atom_type 1140 +#define incorrect_arguments_type 1141 +#define invalid_named_expression_type 1142 +#define invalid_assignment_type 1143 +#define invalid_block_type 1144 
+#define invalid_comprehension_type 1145 +#define invalid_parameters_type 1146 +#define invalid_double_type_comments_type 1147 +#define _loop0_1_type 1148 +#define _loop0_2_type 1149 +#define _loop0_4_type 1150 +#define _gather_3_type 1151 +#define _loop0_6_type 1152 +#define _gather_5_type 1153 +#define _loop0_8_type 1154 +#define _gather_7_type 1155 +#define _loop0_10_type 1156 +#define _gather_9_type 1157 +#define _loop1_11_type 1158 +#define _loop0_13_type 1159 +#define _gather_12_type 1160 +#define _tmp_14_type 1161 +#define _tmp_15_type 1162 +#define _tmp_16_type 1163 +#define _tmp_17_type 1164 +#define _tmp_18_type 1165 +#define _tmp_19_type 1166 +#define _tmp_20_type 1167 +#define _tmp_21_type 1168 +#define _loop1_22_type 1169 +#define _tmp_23_type 1170 +#define _tmp_24_type 1171 +#define _loop0_26_type 1172 +#define _gather_25_type 1173 +#define _loop0_28_type 1174 +#define _gather_27_type 1175 +#define _tmp_29_type 1176 +#define _loop0_30_type 1177 +#define _loop1_31_type 1178 +#define _loop0_33_type 1179 +#define _gather_32_type 1180 +#define _tmp_34_type 1181 +#define _loop0_36_type 1182 +#define _gather_35_type 1183 +#define _tmp_37_type 1184 +#define _loop0_39_type 1185 +#define _gather_38_type 1186 +#define _loop0_41_type 1187 +#define _gather_40_type 1188 +#define _loop0_43_type 1189 +#define _gather_42_type 1190 +#define _loop0_45_type 1191 +#define _gather_44_type 1192 +#define _tmp_46_type 1193 +#define _loop1_47_type 1194 +#define _tmp_48_type 1195 +#define _tmp_49_type 1196 +#define _tmp_50_type 1197 +#define _tmp_51_type 1198 +#define _tmp_52_type 1199 +#define _loop0_53_type 1200 +#define _loop0_54_type 1201 +#define _loop0_55_type 1202 +#define _loop1_56_type 1203 +#define _loop0_57_type 1204 +#define _loop1_58_type 1205 +#define _loop1_59_type 1206 +#define _loop1_60_type 1207 +#define _loop0_61_type 1208 +#define _loop1_62_type 1209 +#define _loop0_63_type 1210 +#define _loop1_64_type 1211 +#define _loop0_65_type 1212 +#define 
_loop1_66_type 1213 +#define _loop1_67_type 1214 +#define _tmp_68_type 1215 +#define _loop0_70_type 1216 +#define _gather_69_type 1217 +#define _loop1_71_type 1218 +#define _loop0_73_type 1219 +#define _gather_72_type 1220 +#define _loop1_74_type 1221 +#define _loop0_75_type 1222 +#define _loop0_76_type 1223 +#define _loop0_77_type 1224 +#define _loop1_78_type 1225 +#define _loop0_79_type 1226 +#define _loop1_80_type 1227 +#define _loop1_81_type 1228 +#define _loop1_82_type 1229 +#define _loop0_83_type 1230 +#define _loop1_84_type 1231 +#define _loop0_85_type 1232 +#define _loop1_86_type 1233 +#define _loop0_87_type 1234 +#define _loop1_88_type 1235 +#define _loop1_89_type 1236 +#define _loop1_90_type 1237 +#define _loop1_91_type 1238 +#define _tmp_92_type 1239 +#define _loop0_94_type 1240 +#define _gather_93_type 1241 +#define _tmp_95_type 1242 +#define _tmp_96_type 1243 +#define _tmp_97_type 1244 +#define _tmp_98_type 1245 +#define _loop1_99_type 1246 +#define _tmp_100_type 1247 +#define _tmp_101_type 1248 +#define _loop0_103_type 1249 +#define _gather_102_type 1250 +#define _loop1_104_type 1251 +#define _loop0_105_type 1252 +#define _loop0_106_type 1253 +#define _tmp_107_type 1254 +#define _tmp_108_type 1255 +#define _loop0_110_type 1256 +#define _gather_109_type 1257 +#define _loop0_112_type 1258 +#define _gather_111_type 1259 +#define _loop0_114_type 1260 +#define _gather_113_type 1261 +#define _loop0_116_type 1262 +#define _gather_115_type 1263 +#define _loop0_117_type 1264 +#define _loop0_119_type 1265 +#define _gather_118_type 1266 +#define _tmp_120_type 1267 +#define _loop0_122_type 1268 +#define _gather_121_type 1269 +#define _loop0_124_type 1270 +#define _gather_123_type 1271 +#define _tmp_125_type 1272 +#define _tmp_126_type 1273 +#define _tmp_127_type 1274 +#define _tmp_128_type 1275 +#define _tmp_129_type 1276 +#define _loop0_130_type 1277 +#define _tmp_131_type 1278 +#define _tmp_132_type 1279 +#define _tmp_133_type 1280 +#define _tmp_134_type 1281 
+#define _tmp_135_type 1282 +#define _tmp_136_type 1283 +#define _tmp_137_type 1284 +#define _tmp_138_type 1285 +#define _tmp_139_type 1286 +#define _tmp_140_type 1287 +#define _tmp_141_type 1288 +#define _tmp_142_type 1289 +#define _loop1_143_type 1290 static mod_ty file_rule(Parser *p); static mod_ty interactive_rule(Parser *p); @@ -436,15 +430,14 @@ static expr_ty expressions_rule(Parser *p); static expr_ty expression_rule(Parser *p); static expr_ty lambdef_rule(Parser *p); static arguments_ty lambda_parameters_rule(Parser *p); -static asdl_seq* lambda_slash_without_default_rule(Parser *p); +static asdl_seq* lambda_slash_no_default_rule(Parser *p); static SlashWithDefault* lambda_slash_with_default_rule(Parser *p); static StarEtc* lambda_star_etc_rule(Parser *p); -static NameDefaultPair* lambda_name_with_optional_default_rule(Parser *p); -static asdl_seq* lambda_names_with_default_rule(Parser *p); -static NameDefaultPair* lambda_name_with_default_rule(Parser *p); -static asdl_seq* lambda_plain_names_rule(Parser *p); -static arg_ty lambda_plain_name_rule(Parser *p); static arg_ty lambda_kwds_rule(Parser *p); +static arg_ty lambda_param_no_default_rule(Parser *p); +static NameDefaultPair* lambda_param_with_default_rule(Parser *p); +static NameDefaultPair* lambda_param_maybe_default_rule(Parser *p); +static arg_ty lambda_param_rule(Parser *p); static expr_ty disjunction_rule(Parser *p); static expr_ty conjunction_rule(Parser *p); static expr_ty inversion_rule(Parser *p); @@ -589,66 +582,66 @@ static asdl_seq *_loop1_71_rule(Parser *p); static asdl_seq *_loop0_73_rule(Parser *p); static asdl_seq *_gather_72_rule(Parser *p); static asdl_seq *_loop1_74_rule(Parser *p); -static void *_tmp_75_rule(Parser *p); -static void *_tmp_76_rule(Parser *p); -static void *_tmp_77_rule(Parser *p); -static void *_tmp_78_rule(Parser *p); -static void *_tmp_79_rule(Parser *p); -static void *_tmp_80_rule(Parser *p); -static void *_tmp_81_rule(Parser *p); -static void 
*_tmp_82_rule(Parser *p); -static void *_tmp_83_rule(Parser *p); -static asdl_seq *_loop0_84_rule(Parser *p); -static void *_tmp_85_rule(Parser *p); +static asdl_seq *_loop0_75_rule(Parser *p); +static asdl_seq *_loop0_76_rule(Parser *p); +static asdl_seq *_loop0_77_rule(Parser *p); +static asdl_seq *_loop1_78_rule(Parser *p); +static asdl_seq *_loop0_79_rule(Parser *p); +static asdl_seq *_loop1_80_rule(Parser *p); +static asdl_seq *_loop1_81_rule(Parser *p); +static asdl_seq *_loop1_82_rule(Parser *p); +static asdl_seq *_loop0_83_rule(Parser *p); +static asdl_seq *_loop1_84_rule(Parser *p); +static asdl_seq *_loop0_85_rule(Parser *p); static asdl_seq *_loop1_86_rule(Parser *p); -static void *_tmp_87_rule(Parser *p); -static void *_tmp_88_rule(Parser *p); -static asdl_seq *_loop0_90_rule(Parser *p); -static asdl_seq *_gather_89_rule(Parser *p); -static asdl_seq *_loop0_92_rule(Parser *p); -static asdl_seq *_gather_91_rule(Parser *p); -static asdl_seq *_loop1_93_rule(Parser *p); -static asdl_seq *_loop1_94_rule(Parser *p); -static asdl_seq *_loop1_95_rule(Parser *p); +static asdl_seq *_loop0_87_rule(Parser *p); +static asdl_seq *_loop1_88_rule(Parser *p); +static asdl_seq *_loop1_89_rule(Parser *p); +static asdl_seq *_loop1_90_rule(Parser *p); +static asdl_seq *_loop1_91_rule(Parser *p); +static void *_tmp_92_rule(Parser *p); +static asdl_seq *_loop0_94_rule(Parser *p); +static asdl_seq *_gather_93_rule(Parser *p); +static void *_tmp_95_rule(Parser *p); static void *_tmp_96_rule(Parser *p); -static asdl_seq *_loop0_98_rule(Parser *p); -static asdl_seq *_gather_97_rule(Parser *p); -static void *_tmp_99_rule(Parser *p); +static void *_tmp_97_rule(Parser *p); +static void *_tmp_98_rule(Parser *p); +static asdl_seq *_loop1_99_rule(Parser *p); static void *_tmp_100_rule(Parser *p); static void *_tmp_101_rule(Parser *p); -static void *_tmp_102_rule(Parser *p); -static asdl_seq *_loop1_103_rule(Parser *p); -static void *_tmp_104_rule(Parser *p); -static void 
*_tmp_105_rule(Parser *p); -static asdl_seq *_loop0_107_rule(Parser *p); -static asdl_seq *_gather_106_rule(Parser *p); -static asdl_seq *_loop1_108_rule(Parser *p); -static asdl_seq *_loop0_109_rule(Parser *p); +static asdl_seq *_loop0_103_rule(Parser *p); +static asdl_seq *_gather_102_rule(Parser *p); +static asdl_seq *_loop1_104_rule(Parser *p); +static asdl_seq *_loop0_105_rule(Parser *p); +static asdl_seq *_loop0_106_rule(Parser *p); +static void *_tmp_107_rule(Parser *p); +static void *_tmp_108_rule(Parser *p); static asdl_seq *_loop0_110_rule(Parser *p); -static void *_tmp_111_rule(Parser *p); -static void *_tmp_112_rule(Parser *p); +static asdl_seq *_gather_109_rule(Parser *p); +static asdl_seq *_loop0_112_rule(Parser *p); +static asdl_seq *_gather_111_rule(Parser *p); static asdl_seq *_loop0_114_rule(Parser *p); static asdl_seq *_gather_113_rule(Parser *p); static asdl_seq *_loop0_116_rule(Parser *p); static asdl_seq *_gather_115_rule(Parser *p); -static asdl_seq *_loop0_118_rule(Parser *p); -static asdl_seq *_gather_117_rule(Parser *p); -static asdl_seq *_loop0_120_rule(Parser *p); -static asdl_seq *_gather_119_rule(Parser *p); -static asdl_seq *_loop0_121_rule(Parser *p); -static asdl_seq *_loop0_123_rule(Parser *p); -static asdl_seq *_gather_122_rule(Parser *p); -static void *_tmp_124_rule(Parser *p); -static asdl_seq *_loop0_126_rule(Parser *p); -static asdl_seq *_gather_125_rule(Parser *p); -static asdl_seq *_loop0_128_rule(Parser *p); -static asdl_seq *_gather_127_rule(Parser *p); +static asdl_seq *_loop0_117_rule(Parser *p); +static asdl_seq *_loop0_119_rule(Parser *p); +static asdl_seq *_gather_118_rule(Parser *p); +static void *_tmp_120_rule(Parser *p); +static asdl_seq *_loop0_122_rule(Parser *p); +static asdl_seq *_gather_121_rule(Parser *p); +static asdl_seq *_loop0_124_rule(Parser *p); +static asdl_seq *_gather_123_rule(Parser *p); +static void *_tmp_125_rule(Parser *p); +static void *_tmp_126_rule(Parser *p); +static void 
*_tmp_127_rule(Parser *p); +static void *_tmp_128_rule(Parser *p); static void *_tmp_129_rule(Parser *p); -static void *_tmp_130_rule(Parser *p); +static asdl_seq *_loop0_130_rule(Parser *p); static void *_tmp_131_rule(Parser *p); static void *_tmp_132_rule(Parser *p); static void *_tmp_133_rule(Parser *p); -static asdl_seq *_loop0_134_rule(Parser *p); +static void *_tmp_134_rule(Parser *p); static void *_tmp_135_rule(Parser *p); static void *_tmp_136_rule(Parser *p); static void *_tmp_137_rule(Parser *p); @@ -657,12 +650,7 @@ static void *_tmp_139_rule(Parser *p); static void *_tmp_140_rule(Parser *p); static void *_tmp_141_rule(Parser *p); static void *_tmp_142_rule(Parser *p); -static void *_tmp_143_rule(Parser *p); -static void *_tmp_144_rule(Parser *p); -static void *_tmp_145_rule(Parser *p); -static void *_tmp_146_rule(Parser *p); -static void *_tmp_147_rule(Parser *p); -static asdl_seq *_loop1_148_rule(Parser *p); +static asdl_seq *_loop1_143_rule(Parser *p); // file: statements? $ @@ -5217,10 +5205,10 @@ lambdef_rule(Parser *p) } // lambda_parameters: -// | lambda_slash_without_default [',' lambda_plain_names] [',' lambda_names_with_default] [',' lambda_star_etc?] -// | lambda_slash_with_default [',' lambda_names_with_default] [',' lambda_star_etc?] -// | lambda_plain_names [',' lambda_names_with_default] [',' lambda_star_etc?] -// | lambda_names_with_default [',' lambda_star_etc?] +// | lambda_slash_no_default lambda_param_no_default* lambda_param_with_default* lambda_star_etc? +// | lambda_slash_with_default lambda_param_with_default* lambda_star_etc? +// | lambda_param_no_default+ lambda_param_with_default* lambda_star_etc? +// | lambda_param_with_default+ lambda_star_etc? 
// | lambda_star_etc static arguments_ty lambda_parameters_rule(Parser *p) @@ -5230,19 +5218,19 @@ lambda_parameters_rule(Parser *p) } arguments_ty res = NULL; int mark = p->mark; - { // lambda_slash_without_default [',' lambda_plain_names] [',' lambda_names_with_default] [',' lambda_star_etc?] + { // lambda_slash_no_default lambda_param_no_default* lambda_param_with_default* lambda_star_etc? asdl_seq* a; - void *b; - void *c; + asdl_seq * b; + asdl_seq * c; void *d; if ( - (a = lambda_slash_without_default_rule(p)) + (a = lambda_slash_no_default_rule(p)) && - (b = _tmp_75_rule(p), 1) + (b = _loop0_75_rule(p)) && - (c = _tmp_76_rule(p), 1) + (c = _loop0_76_rule(p)) && - (d = _tmp_77_rule(p), 1) + (d = lambda_star_etc_rule(p), 1) ) { res = _PyPegen_make_arguments ( p , a , NULL , b , c , d ); @@ -5254,16 +5242,16 @@ lambda_parameters_rule(Parser *p) } p->mark = mark; } - { // lambda_slash_with_default [',' lambda_names_with_default] [',' lambda_star_etc?] + { // lambda_slash_with_default lambda_param_with_default* lambda_star_etc? SlashWithDefault* a; - void *b; + asdl_seq * b; void *c; if ( (a = lambda_slash_with_default_rule(p)) && - (b = _tmp_78_rule(p), 1) + (b = _loop0_77_rule(p)) && - (c = _tmp_79_rule(p), 1) + (c = lambda_star_etc_rule(p), 1) ) { res = _PyPegen_make_arguments ( p , NULL , a , NULL , b , c ); @@ -5275,16 +5263,16 @@ lambda_parameters_rule(Parser *p) } p->mark = mark; } - { // lambda_plain_names [',' lambda_names_with_default] [',' lambda_star_etc?] - asdl_seq* a; - void *b; + { // lambda_param_no_default+ lambda_param_with_default* lambda_star_etc? 
+ asdl_seq * a; + asdl_seq * b; void *c; if ( - (a = lambda_plain_names_rule(p)) + (a = _loop1_78_rule(p)) && - (b = _tmp_80_rule(p), 1) + (b = _loop0_79_rule(p)) && - (c = _tmp_81_rule(p), 1) + (c = lambda_star_etc_rule(p), 1) ) { res = _PyPegen_make_arguments ( p , NULL , NULL , a , b , c ); @@ -5296,13 +5284,13 @@ lambda_parameters_rule(Parser *p) } p->mark = mark; } - { // lambda_names_with_default [',' lambda_star_etc?] - asdl_seq* a; + { // lambda_param_with_default+ lambda_star_etc? + asdl_seq * a; void *b; if ( - (a = lambda_names_with_default_rule(p)) + (a = _loop1_80_rule(p)) && - (b = _tmp_82_rule(p), 1) + (b = lambda_star_etc_rule(p), 1) ) { res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , a , b ); @@ -5334,25 +5322,47 @@ lambda_parameters_rule(Parser *p) return res; } -// lambda_slash_without_default: lambda_plain_names ',' '/' +// lambda_slash_no_default: +// | lambda_param_no_default+ '/' ',' +// | lambda_param_no_default+ '/' &':' static asdl_seq* -lambda_slash_without_default_rule(Parser *p) +lambda_slash_no_default_rule(Parser *p) { if (p->error_indicator) { return NULL; } asdl_seq* res = NULL; int mark = p->mark; - { // lambda_plain_names ',' '/' - asdl_seq* a; + { // lambda_param_no_default+ '/' ',' + asdl_seq * a; Token * literal; Token * literal_1; if ( - (a = lambda_plain_names_rule(p)) + (a = _loop1_81_rule(p)) && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 17)) + && + (literal_1 = _PyPegen_expect_token(p, 12)) + ) + { + res = a; + if (res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + return NULL; + } + goto done; + } + p->mark = mark; + } + { // lambda_param_no_default+ '/' &':' + asdl_seq * a; + Token * literal; + if ( + (a = _loop1_82_rule(p)) && - (literal_1 = _PyPegen_expect_token(p, 17)) + (literal = _PyPegen_expect_token(p, 17)) + && + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) ) { res = a; @@ -5369,7 +5379,9 @@ lambda_slash_without_default_rule(Parser *p) 
return res; } -// lambda_slash_with_default: [lambda_plain_names ','] lambda_names_with_default ',' '/' +// lambda_slash_with_default: +// | lambda_param_no_default* lambda_param_with_default+ '/' ',' +// | lambda_param_no_default* lambda_param_with_default+ '/' &':' static SlashWithDefault* lambda_slash_with_default_rule(Parser *p) { @@ -5378,19 +5390,42 @@ lambda_slash_with_default_rule(Parser *p) } SlashWithDefault* res = NULL; int mark = p->mark; - { // [lambda_plain_names ','] lambda_names_with_default ',' '/' - void *a; - asdl_seq* b; + { // lambda_param_no_default* lambda_param_with_default+ '/' ',' + asdl_seq * a; + asdl_seq * b; Token * literal; Token * literal_1; if ( - (a = _tmp_83_rule(p), 1) + (a = _loop0_83_rule(p)) && - (b = lambda_names_with_default_rule(p)) + (b = _loop1_84_rule(p)) && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 17)) + && + (literal_1 = _PyPegen_expect_token(p, 12)) + ) + { + res = _PyPegen_slash_with_default ( p , a , b ); + if (res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + return NULL; + } + goto done; + } + p->mark = mark; + } + { // lambda_param_no_default* lambda_param_with_default+ '/' &':' + asdl_seq * a; + asdl_seq * b; + Token * literal; + if ( + (a = _loop0_85_rule(p)) + && + (b = _loop1_86_rule(p)) + && + (literal = _PyPegen_expect_token(p, 17)) && - (literal_1 = _PyPegen_expect_token(p, 17)) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) ) { res = _PyPegen_slash_with_default ( p , a , b ); @@ -5408,9 +5443,9 @@ lambda_slash_with_default_rule(Parser *p) } // lambda_star_etc: -// | '*' lambda_plain_name lambda_name_with_optional_default* [',' lambda_kwds] ','? -// | '*' lambda_name_with_optional_default+ [',' lambda_kwds] ','? -// | lambda_kwds ','? +// | '*' lambda_param_no_default lambda_param_maybe_default* lambda_kwds? +// | '*' ',' lambda_param_maybe_default+ lambda_kwds? 
+// | lambda_kwds static StarEtc* lambda_star_etc_rule(Parser *p) { @@ -5419,23 +5454,19 @@ lambda_star_etc_rule(Parser *p) } StarEtc* res = NULL; int mark = p->mark; - { // '*' lambda_plain_name lambda_name_with_optional_default* [',' lambda_kwds] ','? + { // '*' lambda_param_no_default lambda_param_maybe_default* lambda_kwds? arg_ty a; asdl_seq * b; void *c; Token * literal; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (literal = _PyPegen_expect_token(p, 16)) && - (a = lambda_plain_name_rule(p)) - && - (b = _loop0_84_rule(p)) + (a = lambda_param_no_default_rule(p)) && - (c = _tmp_85_rule(p), 1) + (b = _loop0_87_rule(p)) && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (c = lambda_kwds_rule(p), 1) ) { res = _PyPegen_star_etc ( p , a , b , c ); @@ -5447,20 +5478,19 @@ lambda_star_etc_rule(Parser *p) } p->mark = mark; } - { // '*' lambda_name_with_optional_default+ [',' lambda_kwds] ','? + { // '*' ',' lambda_param_maybe_default+ lambda_kwds? asdl_seq * b; void *c; Token * literal; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings + Token * literal_1; if ( (literal = _PyPegen_expect_token(p, 16)) && - (b = _loop1_86_rule(p)) + (literal_1 = _PyPegen_expect_token(p, 12)) && - (c = _tmp_87_rule(p), 1) + (b = _loop1_88_rule(p)) && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (c = lambda_kwds_rule(p), 1) ) { res = _PyPegen_star_etc ( p , NULL , b , c ); @@ -5472,14 +5502,10 @@ lambda_star_etc_rule(Parser *p) } p->mark = mark; } - { // lambda_kwds ','? 
+ { // lambda_kwds arg_ty a; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (a = lambda_kwds_rule(p)) - && - (opt_var = _PyPegen_expect_token(p, 12), 1) ) { res = _PyPegen_star_etc ( p , NULL , NULL , a ); @@ -5496,28 +5522,25 @@ lambda_star_etc_rule(Parser *p) return res; } -// lambda_name_with_optional_default: ',' lambda_plain_name ['=' expression] -static NameDefaultPair* -lambda_name_with_optional_default_rule(Parser *p) +// lambda_kwds: '**' lambda_param_no_default +static arg_ty +lambda_kwds_rule(Parser *p) { if (p->error_indicator) { return NULL; } - NameDefaultPair* res = NULL; + arg_ty res = NULL; int mark = p->mark; - { // ',' lambda_plain_name ['=' expression] + { // '**' lambda_param_no_default arg_ty a; - void *b; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 12)) - && - (a = lambda_plain_name_rule(p)) + (literal = _PyPegen_expect_token(p, 35)) && - (b = _tmp_88_rule(p), 1) + (a = lambda_param_no_default_rule(p)) ) { - res = _PyPegen_name_default_pair ( p , a , b , NULL ); + res = a; if (res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; @@ -5531,19 +5554,39 @@ lambda_name_with_optional_default_rule(Parser *p) return res; } -// lambda_names_with_default: ','.lambda_name_with_default+ -static asdl_seq* -lambda_names_with_default_rule(Parser *p) +// lambda_param_no_default: lambda_param ',' | lambda_param &':' +static arg_ty +lambda_param_no_default_rule(Parser *p) { if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; + arg_ty res = NULL; int mark = p->mark; - { // ','.lambda_name_with_default+ - asdl_seq * a; + { // lambda_param ',' + arg_ty a; + Token * literal; + if ( + (a = lambda_param_rule(p)) + && + (literal = _PyPegen_expect_token(p, 12)) + ) + { + res = a; + if (res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + return NULL; + } + goto done; + } + p->mark = mark; + } + { // lambda_param &':' + arg_ty a; if ( - (a = _gather_89_rule(p)) + (a = lambda_param_rule(p)) 
+ && + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) ) { res = a; @@ -5560,28 +5603,48 @@ lambda_names_with_default_rule(Parser *p) return res; } -// lambda_name_with_default: lambda_plain_name '=' expression +// lambda_param_with_default: lambda_param default ',' | lambda_param default &':' static NameDefaultPair* -lambda_name_with_default_rule(Parser *p) +lambda_param_with_default_rule(Parser *p) { if (p->error_indicator) { return NULL; } NameDefaultPair* res = NULL; int mark = p->mark; - { // lambda_plain_name '=' expression - expr_ty e; + { // lambda_param default ',' + arg_ty a; + expr_ty c; Token * literal; - arg_ty n; if ( - (n = lambda_plain_name_rule(p)) + (a = lambda_param_rule(p)) && - (literal = _PyPegen_expect_token(p, 22)) + (c = default_rule(p)) && - (e = expression_rule(p)) + (literal = _PyPegen_expect_token(p, 12)) + ) + { + res = _PyPegen_name_default_pair ( p , a , c , NULL ); + if (res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + return NULL; + } + goto done; + } + p->mark = mark; + } + { // lambda_param default &':' + arg_ty a; + expr_ty c; + if ( + (a = lambda_param_rule(p)) + && + (c = default_rule(p)) + && + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) ) { - res = _PyPegen_name_default_pair ( p , n , e , NULL ); + res = _PyPegen_name_default_pair ( p , a , c , NULL ); if (res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; @@ -5595,22 +5658,48 @@ lambda_name_with_default_rule(Parser *p) return res; } -// lambda_plain_names: ','.(lambda_plain_name !'=')+ -static asdl_seq* -lambda_plain_names_rule(Parser *p) +// lambda_param_maybe_default: lambda_param default? ',' | lambda_param default? &':' +static NameDefaultPair* +lambda_param_maybe_default_rule(Parser *p) { if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; + NameDefaultPair* res = NULL; int mark = p->mark; - { // ','.(lambda_plain_name !'=')+ - asdl_seq * a; + { // lambda_param default? 
',' + arg_ty a; + void *c; + Token * literal; if ( - (a = _gather_91_rule(p)) + (a = lambda_param_rule(p)) + && + (c = default_rule(p), 1) + && + (literal = _PyPegen_expect_token(p, 12)) ) { - res = a; + res = _PyPegen_name_default_pair ( p , a , c , NULL ); + if (res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + return NULL; + } + goto done; + } + p->mark = mark; + } + { // lambda_param default? &':' + arg_ty a; + void *c; + if ( + (a = lambda_param_rule(p)) + && + (c = default_rule(p), 1) + && + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) + ) + { + res = _PyPegen_name_default_pair ( p , a , c , NULL ); if (res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; @@ -5624,9 +5713,9 @@ lambda_plain_names_rule(Parser *p) return res; } -// lambda_plain_name: NAME +// lambda_param: NAME static arg_ty -lambda_plain_name_rule(Parser *p) +lambda_param_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -5669,38 +5758,6 @@ lambda_plain_name_rule(Parser *p) return res; } -// lambda_kwds: '**' lambda_plain_name -static arg_ty -lambda_kwds_rule(Parser *p) -{ - if (p->error_indicator) { - return NULL; - } - arg_ty res = NULL; - int mark = p->mark; - { // '**' lambda_plain_name - arg_ty a; - Token * literal; - if ( - (literal = _PyPegen_expect_token(p, 35)) - && - (a = lambda_plain_name_rule(p)) - ) - { - res = a; - if (res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - return NULL; - } - goto done; - } - p->mark = mark; - } - res = NULL; - done: - return res; -} - // disjunction: conjunction (('or' conjunction))+ | conjunction static expr_ty disjunction_rule(Parser *p) @@ -5726,7 +5783,7 @@ disjunction_rule(Parser *p) if ( (a = conjunction_rule(p)) && - (b = _loop1_93_rule(p)) + (b = _loop1_89_rule(p)) ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -5788,7 +5845,7 @@ conjunction_rule(Parser *p) if ( (a = inversion_rule(p)) && - (b = _loop1_94_rule(p)) + (b = _loop1_90_rule(p)) ) { Token *token 
= _PyPegen_get_last_nonnwhitespace_token(p); @@ -5910,7 +5967,7 @@ comparison_rule(Parser *p) if ( (a = bitwise_or_rule(p)) && - (b = _loop1_95_rule(p)) + (b = _loop1_91_rule(p)) ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -6122,10 +6179,10 @@ noteq_bitwise_or_rule(Parser *p) CmpopExprPair* res = NULL; int mark = p->mark; { // ('!=') bitwise_or - void *_tmp_96_var; + void *_tmp_92_var; expr_ty a; if ( - (_tmp_96_var = _tmp_96_rule(p)) + (_tmp_92_var = _tmp_92_rule(p)) && (a = bitwise_or_rule(p)) ) @@ -7567,7 +7624,7 @@ slices_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (a = _gather_97_rule(p)) + (a = _gather_93_rule(p)) && (opt_var = _PyPegen_expect_token(p, 12), 1) ) @@ -7623,7 +7680,7 @@ slice_rule(Parser *p) && (b = expression_rule(p), 1) && - (c = _tmp_99_rule(p), 1) + (c = _tmp_95_rule(p), 1) ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7811,40 +7868,40 @@ atom_rule(Parser *p) p->mark = mark; } { // &'(' (tuple | group | genexp) - void *_tmp_100_var; + void *_tmp_96_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 7) && - (_tmp_100_var = _tmp_100_rule(p)) + (_tmp_96_var = _tmp_96_rule(p)) ) { - res = _tmp_100_var; + res = _tmp_96_var; goto done; } p->mark = mark; } { // &'[' (list | listcomp) - void *_tmp_101_var; + void *_tmp_97_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 9) && - (_tmp_101_var = _tmp_101_rule(p)) + (_tmp_97_var = _tmp_97_rule(p)) ) { - res = _tmp_101_var; + res = _tmp_97_var; goto done; } p->mark = mark; } { // &'{' (dict | set | dictcomp | setcomp) - void *_tmp_102_var; + void *_tmp_98_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 25) && - (_tmp_102_var = _tmp_102_rule(p)) + (_tmp_98_var = _tmp_98_rule(p)) ) { - res = _tmp_102_var; + res = _tmp_98_var; goto done; } p->mark = mark; @@ -7891,7 +7948,7 @@ strings_rule(Parser *p) { // STRING+ asdl_seq * a; if ( - (a = _loop1_103_rule(p)) + (a = 
_loop1_99_rule(p)) ) { res = _PyPegen_concatenate_strings ( p , a ); @@ -8049,7 +8106,7 @@ tuple_rule(Parser *p) if ( (literal = _PyPegen_expect_token(p, 7)) && - (a = _tmp_104_rule(p), 1) + (a = _tmp_100_rule(p), 1) && (literal_1 = _PyPegen_expect_token(p, 8)) ) @@ -8092,7 +8149,7 @@ group_rule(Parser *p) if ( (literal = _PyPegen_expect_token(p, 7)) && - (a = _tmp_105_rule(p)) + (a = _tmp_101_rule(p)) && (literal_1 = _PyPegen_expect_token(p, 8)) ) @@ -8411,7 +8468,7 @@ kvpairs_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (a = _gather_106_rule(p)) + (a = _gather_102_rule(p)) && (opt_var = _PyPegen_expect_token(p, 12), 1) ) @@ -8493,12 +8550,12 @@ for_if_clauses_rule(Parser *p) asdl_seq* res = NULL; int mark = p->mark; { // for_if_clause+ - asdl_seq * _loop1_108_var; + asdl_seq * _loop1_104_var; if ( - (_loop1_108_var = _loop1_108_rule(p)) + (_loop1_104_var = _loop1_104_rule(p)) ) { - res = _loop1_108_var; + res = _loop1_104_var; goto done; } p->mark = mark; @@ -8537,7 +8594,7 @@ for_if_clause_rule(Parser *p) && (b = disjunction_rule(p)) && - (c = _loop0_109_rule(p)) + (c = _loop0_105_rule(p)) ) { res = CHECK_VERSION ( 6 , "Async comprehensions are" , _Py_comprehension ( a , b , c , 1 , p -> arena ) ); @@ -8564,7 +8621,7 @@ for_if_clause_rule(Parser *p) && (b = disjunction_rule(p)) && - (c = _loop0_110_rule(p)) + (c = _loop0_106_rule(p)) ) { res = _Py_comprehension ( a , b , c , 0 , p -> arena ); @@ -8730,7 +8787,7 @@ args_rule(Parser *p) if ( (a = starred_expression_rule(p)) && - (b = _tmp_111_rule(p), 1) + (b = _tmp_107_rule(p), 1) ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -8779,7 +8836,7 @@ args_rule(Parser *p) if ( (a = named_expression_rule(p)) && - (b = _tmp_112_rule(p), 1) + (b = _tmp_108_rule(p), 1) ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -8821,11 +8878,11 @@ kwargs_rule(Parser *p) asdl_seq * b; Token * literal; if ( - (a = _gather_113_rule(p)) + (a = _gather_109_rule(p)) 
&& (literal = _PyPegen_expect_token(p, 12)) && - (b = _gather_115_rule(p)) + (b = _gather_111_rule(p)) ) { res = _PyPegen_join_sequences ( p , a , b ); @@ -8838,23 +8895,23 @@ kwargs_rule(Parser *p) p->mark = mark; } { // ','.kwarg_or_starred+ - asdl_seq * _gather_117_var; + asdl_seq * _gather_113_var; if ( - (_gather_117_var = _gather_117_rule(p)) + (_gather_113_var = _gather_113_rule(p)) ) { - res = _gather_117_var; + res = _gather_113_var; goto done; } p->mark = mark; } { // ','.kwarg_or_double_starred+ - asdl_seq * _gather_119_var; + asdl_seq * _gather_115_var; if ( - (_gather_119_var = _gather_119_rule(p)) + (_gather_115_var = _gather_115_rule(p)) ) { - res = _gather_119_var; + res = _gather_115_var; goto done; } p->mark = mark; @@ -9097,7 +9154,7 @@ star_targets_rule(Parser *p) if ( (a = star_target_rule(p)) && - (b = _loop0_121_rule(p)) + (b = _loop0_117_rule(p)) && (opt_var = _PyPegen_expect_token(p, 12), 1) ) @@ -9138,7 +9195,7 @@ star_targets_seq_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (a = _gather_122_rule(p)) + (a = _gather_118_rule(p)) && (opt_var = _PyPegen_expect_token(p, 12), 1) ) @@ -9186,7 +9243,7 @@ star_target_rule(Parser *p) if ( (literal = _PyPegen_expect_token(p, 16)) && - (a = _tmp_124_rule(p)) + (a = _tmp_120_rule(p)) ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -9575,7 +9632,7 @@ del_targets_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (a = _gather_125_rule(p)) + (a = _gather_121_rule(p)) && (opt_var = _PyPegen_expect_token(p, 12), 1) ) @@ -9828,7 +9885,7 @@ targets_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (a = _gather_127_rule(p)) + (a = _gather_123_rule(p)) && (opt_var = _PyPegen_expect_token(p, 12), 1) ) @@ -10356,7 +10413,7 @@ incorrect_arguments_rule(Parser *p) && (literal = _PyPegen_expect_token(p, 12)) && - (opt_var = _tmp_129_rule(p), 1) + (opt_var = _tmp_125_rule(p), 1) ) { res = 
RAISE_SYNTAX_ERROR ( "Generator expression must be parenthesized" ); @@ -10491,7 +10548,7 @@ invalid_assignment_rule(Parser *p) && (expression_var_1 = expression_rule(p)) && - (opt_var = _tmp_130_rule(p), 1) + (opt_var = _tmp_126_rule(p), 1) ) { res = RAISE_SYNTAX_ERROR ( "illegal target for annotation" ); @@ -10504,15 +10561,15 @@ invalid_assignment_rule(Parser *p) p->mark = mark; } { // expression ('=' | augassign) (yield_expr | star_expressions) - void *_tmp_131_var; - void *_tmp_132_var; + void *_tmp_127_var; + void *_tmp_128_var; expr_ty a; if ( (a = expression_rule(p)) && - (_tmp_131_var = _tmp_131_rule(p)) + (_tmp_127_var = _tmp_127_rule(p)) && - (_tmp_132_var = _tmp_132_rule(p)) + (_tmp_128_var = _tmp_128_rule(p)) ) { res = RAISE_SYNTAX_ERROR_NO_COL_OFFSET ( "cannot assign to %s" , _PyPegen_get_expr_name ( a ) ); @@ -10570,12 +10627,12 @@ invalid_comprehension_rule(Parser *p) void * res = NULL; int mark = p->mark; { // ('[' | '(' | '{') '*' expression for_if_clauses - void *_tmp_133_var; + void *_tmp_129_var; expr_ty expression_var; asdl_seq* for_if_clauses_var; Token * literal; if ( - (_tmp_133_var = _tmp_133_rule(p)) + (_tmp_129_var = _tmp_129_rule(p)) && (literal = _PyPegen_expect_token(p, 16)) && @@ -10609,13 +10666,13 @@ invalid_parameters_rule(Parser *p) void * res = NULL; int mark = p->mark; { // param_no_default* (slash_with_default | param_with_default+) param_no_default - asdl_seq * _loop0_134_var; - void *_tmp_135_var; + asdl_seq * _loop0_130_var; + void *_tmp_131_var; arg_ty param_no_default_var; if ( - (_loop0_134_var = _loop0_134_rule(p)) + (_loop0_130_var = _loop0_130_rule(p)) && - (_tmp_135_var = _tmp_135_rule(p)) + (_tmp_131_var = _tmp_131_rule(p)) && (param_no_default_var = param_no_default_rule(p)) ) @@ -11570,12 +11627,12 @@ _loop1_22_rule(Parser *p) ssize_t children_capacity = 1; ssize_t n = 0; { // (star_targets '=') - void *_tmp_136_var; + void *_tmp_132_var; while ( - (_tmp_136_var = _tmp_136_rule(p)) + (_tmp_132_var = 
_tmp_132_rule(p)) ) { - res = _tmp_136_var; + res = _tmp_132_var; if (n == children_capacity) { children_capacity *= 2; children = PyMem_Realloc(children, children_capacity*sizeof(void *)); @@ -11897,12 +11954,12 @@ _loop0_30_rule(Parser *p) ssize_t children_capacity = 1; ssize_t n = 0; { // ('.' | '...') - void *_tmp_137_var; + void *_tmp_133_var; while ( - (_tmp_137_var = _tmp_137_rule(p)) + (_tmp_133_var = _tmp_133_rule(p)) ) { - res = _tmp_137_var; + res = _tmp_133_var; if (n == children_capacity) { children_capacity *= 2; children = PyMem_Realloc(children, children_capacity*sizeof(void *)); @@ -11946,12 +12003,12 @@ _loop1_31_rule(Parser *p) ssize_t children_capacity = 1; ssize_t n = 0; { // ('.' | '...') - void *_tmp_138_var; + void *_tmp_134_var; while ( - (_tmp_138_var = _tmp_138_rule(p)) + (_tmp_134_var = _tmp_134_rule(p)) ) { - res = _tmp_138_var; + res = _tmp_134_var; if (n == children_capacity) { children_capacity *= 2; children = PyMem_Realloc(children, children_capacity*sizeof(void *)); @@ -13528,12 +13585,12 @@ _loop1_67_rule(Parser *p) ssize_t children_capacity = 1; ssize_t n = 0; { // ('@' named_expression NEWLINE) - void *_tmp_139_var; + void *_tmp_135_var; while ( - (_tmp_139_var = _tmp_139_rule(p)) + (_tmp_135_var = _tmp_135_rule(p)) ) { - res = _tmp_139_var; + res = _tmp_135_var; if (n == children_capacity) { children_capacity *= 2; children = PyMem_Realloc(children, children_capacity*sizeof(void *)); @@ -13701,12 +13758,12 @@ _loop1_71_rule(Parser *p) ssize_t children_capacity = 1; ssize_t n = 0; { // (',' star_expression) - void *_tmp_140_var; + void *_tmp_136_var; while ( - (_tmp_140_var = _tmp_140_rule(p)) + (_tmp_136_var = _tmp_136_rule(p)) ) { - res = _tmp_140_var; + res = _tmp_136_var; if (n == children_capacity) { children_capacity *= 2; children = PyMem_Realloc(children, children_capacity*sizeof(void *)); @@ -13839,12 +13896,12 @@ _loop1_74_rule(Parser *p) ssize_t children_capacity = 1; ssize_t n = 0; { // (',' expression) - void 
*_tmp_141_var; + void *_tmp_137_var; while ( - (_tmp_141_var = _tmp_141_rule(p)) + (_tmp_137_var = _tmp_137_rule(p)) ) { - res = _tmp_141_var; + res = _tmp_137_var; if (n == children_capacity) { children_capacity *= 2; children = PyMem_Realloc(children, children_capacity*sizeof(void *)); @@ -13874,297 +13931,519 @@ _loop1_74_rule(Parser *p) return seq; } -// _tmp_75: ',' lambda_plain_names -static void * -_tmp_75_rule(Parser *p) +// _loop0_75: lambda_param_no_default +static asdl_seq * +_loop0_75_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; + void *res = NULL; int mark = p->mark; - { // ',' lambda_plain_names - Token * literal; - asdl_seq* x; - if ( - (literal = _PyPegen_expect_token(p, 12)) - && - (x = lambda_plain_names_rule(p)) + int start_mark = p->mark; + void **children = PyMem_Malloc(sizeof(void *)); + if (!children) { + PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + return NULL; + } + ssize_t children_capacity = 1; + ssize_t n = 0; + { // lambda_param_no_default + arg_ty lambda_param_no_default_var; + while ( + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) ) { - res = x; - if (res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - return NULL; - } - goto done; - } + res = lambda_param_no_default_var; + if (n == children_capacity) { + children_capacity *= 2; + children = PyMem_Realloc(children, children_capacity*sizeof(void *)); + if (!children) { + PyErr_Format(PyExc_MemoryError, "realloc None"); + return NULL; + } + } + children[n++] = res; + mark = p->mark; + } p->mark = mark; } - res = NULL; - done: - return res; + asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); + if (!seq) { + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_75"); + PyMem_Free(children); + return NULL; + } + for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); + PyMem_Free(children); + _PyPegen_insert_memo(p, start_mark, _loop0_75_type, seq); + return seq; } -// _tmp_76: ',' lambda_names_with_default 
-static void * -_tmp_76_rule(Parser *p) +// _loop0_76: lambda_param_with_default +static asdl_seq * +_loop0_76_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; + void *res = NULL; int mark = p->mark; - { // ',' lambda_names_with_default - Token * literal; - asdl_seq* y; - if ( - (literal = _PyPegen_expect_token(p, 12)) - && - (y = lambda_names_with_default_rule(p)) + int start_mark = p->mark; + void **children = PyMem_Malloc(sizeof(void *)); + if (!children) { + PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + return NULL; + } + ssize_t children_capacity = 1; + ssize_t n = 0; + { // lambda_param_with_default + NameDefaultPair* lambda_param_with_default_var; + while ( + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) ) { - res = y; - if (res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - return NULL; + res = lambda_param_with_default_var; + if (n == children_capacity) { + children_capacity *= 2; + children = PyMem_Realloc(children, children_capacity*sizeof(void *)); + if (!children) { + PyErr_Format(PyExc_MemoryError, "realloc None"); + return NULL; + } } - goto done; + children[n++] = res; + mark = p->mark; } p->mark = mark; } - res = NULL; - done: - return res; + asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); + if (!seq) { + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_76"); + PyMem_Free(children); + return NULL; + } + for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); + PyMem_Free(children); + _PyPegen_insert_memo(p, start_mark, _loop0_76_type, seq); + return seq; } -// _tmp_77: ',' lambda_star_etc? -static void * -_tmp_77_rule(Parser *p) +// _loop0_77: lambda_param_with_default +static asdl_seq * +_loop0_77_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; + void *res = NULL; int mark = p->mark; - { // ',' lambda_star_etc? 
- Token * literal; - void *z; - if ( - (literal = _PyPegen_expect_token(p, 12)) - && - (z = lambda_star_etc_rule(p), 1) + int start_mark = p->mark; + void **children = PyMem_Malloc(sizeof(void *)); + if (!children) { + PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + return NULL; + } + ssize_t children_capacity = 1; + ssize_t n = 0; + { // lambda_param_with_default + NameDefaultPair* lambda_param_with_default_var; + while ( + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) ) { - res = z; - if (res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - return NULL; + res = lambda_param_with_default_var; + if (n == children_capacity) { + children_capacity *= 2; + children = PyMem_Realloc(children, children_capacity*sizeof(void *)); + if (!children) { + PyErr_Format(PyExc_MemoryError, "realloc None"); + return NULL; + } } - goto done; + children[n++] = res; + mark = p->mark; } p->mark = mark; } - res = NULL; - done: - return res; + asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); + if (!seq) { + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_77"); + PyMem_Free(children); + return NULL; + } + for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); + PyMem_Free(children); + _PyPegen_insert_memo(p, start_mark, _loop0_77_type, seq); + return seq; } -// _tmp_78: ',' lambda_names_with_default -static void * -_tmp_78_rule(Parser *p) +// _loop1_78: lambda_param_no_default +static asdl_seq * +_loop1_78_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; + void *res = NULL; int mark = p->mark; - { // ',' lambda_names_with_default - Token * literal; - asdl_seq* y; - if ( - (literal = _PyPegen_expect_token(p, 12)) - && - (y = lambda_names_with_default_rule(p)) + int start_mark = p->mark; + void **children = PyMem_Malloc(sizeof(void *)); + if (!children) { + PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + return NULL; + } + ssize_t children_capacity = 1; + ssize_t n = 0; + { // 
lambda_param_no_default + arg_ty lambda_param_no_default_var; + while ( + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) ) { - res = y; - if (res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - return NULL; + res = lambda_param_no_default_var; + if (n == children_capacity) { + children_capacity *= 2; + children = PyMem_Realloc(children, children_capacity*sizeof(void *)); + if (!children) { + PyErr_Format(PyExc_MemoryError, "realloc None"); + return NULL; + } } - goto done; + children[n++] = res; + mark = p->mark; } p->mark = mark; } - res = NULL; - done: - return res; + if (n == 0) { + PyMem_Free(children); + return NULL; + } + asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); + if (!seq) { + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_78"); + PyMem_Free(children); + return NULL; + } + for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); + PyMem_Free(children); + _PyPegen_insert_memo(p, start_mark, _loop1_78_type, seq); + return seq; } -// _tmp_79: ',' lambda_star_etc? -static void * -_tmp_79_rule(Parser *p) +// _loop0_79: lambda_param_with_default +static asdl_seq * +_loop0_79_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; + void *res = NULL; int mark = p->mark; - { // ',' lambda_star_etc? 
- Token * literal; - void *z; - if ( - (literal = _PyPegen_expect_token(p, 12)) - && - (z = lambda_star_etc_rule(p), 1) + int start_mark = p->mark; + void **children = PyMem_Malloc(sizeof(void *)); + if (!children) { + PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + return NULL; + } + ssize_t children_capacity = 1; + ssize_t n = 0; + { // lambda_param_with_default + NameDefaultPair* lambda_param_with_default_var; + while ( + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) ) { - res = z; - if (res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - return NULL; + res = lambda_param_with_default_var; + if (n == children_capacity) { + children_capacity *= 2; + children = PyMem_Realloc(children, children_capacity*sizeof(void *)); + if (!children) { + PyErr_Format(PyExc_MemoryError, "realloc None"); + return NULL; + } } - goto done; + children[n++] = res; + mark = p->mark; } p->mark = mark; } - res = NULL; - done: - return res; + asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); + if (!seq) { + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_79"); + PyMem_Free(children); + return NULL; + } + for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); + PyMem_Free(children); + _PyPegen_insert_memo(p, start_mark, _loop0_79_type, seq); + return seq; } -// _tmp_80: ',' lambda_names_with_default -static void * -_tmp_80_rule(Parser *p) +// _loop1_80: lambda_param_with_default +static asdl_seq * +_loop1_80_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; + void *res = NULL; int mark = p->mark; - { // ',' lambda_names_with_default - Token * literal; - asdl_seq* y; - if ( - (literal = _PyPegen_expect_token(p, 12)) - && - (y = lambda_names_with_default_rule(p)) + int start_mark = p->mark; + void **children = PyMem_Malloc(sizeof(void *)); + if (!children) { + PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + return NULL; + } + ssize_t children_capacity = 1; + ssize_t n = 0; + { // 
lambda_param_with_default + NameDefaultPair* lambda_param_with_default_var; + while ( + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) ) { - res = y; - if (res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - return NULL; + res = lambda_param_with_default_var; + if (n == children_capacity) { + children_capacity *= 2; + children = PyMem_Realloc(children, children_capacity*sizeof(void *)); + if (!children) { + PyErr_Format(PyExc_MemoryError, "realloc None"); + return NULL; + } } - goto done; + children[n++] = res; + mark = p->mark; } p->mark = mark; } - res = NULL; - done: - return res; + if (n == 0) { + PyMem_Free(children); + return NULL; + } + asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); + if (!seq) { + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_80"); + PyMem_Free(children); + return NULL; + } + for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); + PyMem_Free(children); + _PyPegen_insert_memo(p, start_mark, _loop1_80_type, seq); + return seq; } -// _tmp_81: ',' lambda_star_etc? -static void * -_tmp_81_rule(Parser *p) +// _loop1_81: lambda_param_no_default +static asdl_seq * +_loop1_81_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; + void *res = NULL; int mark = p->mark; - { // ',' lambda_star_etc? 
- Token * literal; - void *z; - if ( - (literal = _PyPegen_expect_token(p, 12)) - && - (z = lambda_star_etc_rule(p), 1) + int start_mark = p->mark; + void **children = PyMem_Malloc(sizeof(void *)); + if (!children) { + PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + return NULL; + } + ssize_t children_capacity = 1; + ssize_t n = 0; + { // lambda_param_no_default + arg_ty lambda_param_no_default_var; + while ( + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) ) { - res = z; - if (res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - return NULL; + res = lambda_param_no_default_var; + if (n == children_capacity) { + children_capacity *= 2; + children = PyMem_Realloc(children, children_capacity*sizeof(void *)); + if (!children) { + PyErr_Format(PyExc_MemoryError, "realloc None"); + return NULL; + } } - goto done; + children[n++] = res; + mark = p->mark; } p->mark = mark; } - res = NULL; - done: - return res; + if (n == 0) { + PyMem_Free(children); + return NULL; + } + asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); + if (!seq) { + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_81"); + PyMem_Free(children); + return NULL; + } + for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); + PyMem_Free(children); + _PyPegen_insert_memo(p, start_mark, _loop1_81_type, seq); + return seq; } -// _tmp_82: ',' lambda_star_etc? -static void * -_tmp_82_rule(Parser *p) +// _loop1_82: lambda_param_no_default +static asdl_seq * +_loop1_82_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; + void *res = NULL; int mark = p->mark; - { // ',' lambda_star_etc? 
- Token * literal; - void *z; - if ( - (literal = _PyPegen_expect_token(p, 12)) - && - (z = lambda_star_etc_rule(p), 1) + int start_mark = p->mark; + void **children = PyMem_Malloc(sizeof(void *)); + if (!children) { + PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + return NULL; + } + ssize_t children_capacity = 1; + ssize_t n = 0; + { // lambda_param_no_default + arg_ty lambda_param_no_default_var; + while ( + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) ) { - res = z; - if (res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - return NULL; + res = lambda_param_no_default_var; + if (n == children_capacity) { + children_capacity *= 2; + children = PyMem_Realloc(children, children_capacity*sizeof(void *)); + if (!children) { + PyErr_Format(PyExc_MemoryError, "realloc None"); + return NULL; + } } - goto done; + children[n++] = res; + mark = p->mark; } p->mark = mark; } - res = NULL; - done: - return res; + if (n == 0) { + PyMem_Free(children); + return NULL; + } + asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); + if (!seq) { + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_82"); + PyMem_Free(children); + return NULL; + } + for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); + PyMem_Free(children); + _PyPegen_insert_memo(p, start_mark, _loop1_82_type, seq); + return seq; } -// _tmp_83: lambda_plain_names ',' -static void * -_tmp_83_rule(Parser *p) +// _loop0_83: lambda_param_no_default +static asdl_seq * +_loop0_83_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; + void *res = NULL; int mark = p->mark; - { // lambda_plain_names ',' - Token * literal; - asdl_seq* n; - if ( - (n = lambda_plain_names_rule(p)) - && - (literal = _PyPegen_expect_token(p, 12)) + int start_mark = p->mark; + void **children = PyMem_Malloc(sizeof(void *)); + if (!children) { + PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + return NULL; + } + ssize_t children_capacity = 1; + ssize_t n = 0; + { // 
lambda_param_no_default + arg_ty lambda_param_no_default_var; + while ( + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) ) { - res = n; - if (res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - return NULL; + res = lambda_param_no_default_var; + if (n == children_capacity) { + children_capacity *= 2; + children = PyMem_Realloc(children, children_capacity*sizeof(void *)); + if (!children) { + PyErr_Format(PyExc_MemoryError, "realloc None"); + return NULL; + } } - goto done; + children[n++] = res; + mark = p->mark; } p->mark = mark; } - res = NULL; - done: - return res; + asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); + if (!seq) { + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_83"); + PyMem_Free(children); + return NULL; + } + for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); + PyMem_Free(children); + _PyPegen_insert_memo(p, start_mark, _loop0_83_type, seq); + return seq; +} + +// _loop1_84: lambda_param_with_default +static asdl_seq * +_loop1_84_rule(Parser *p) +{ + if (p->error_indicator) { + return NULL; + } + void *res = NULL; + int mark = p->mark; + int start_mark = p->mark; + void **children = PyMem_Malloc(sizeof(void *)); + if (!children) { + PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + return NULL; + } + ssize_t children_capacity = 1; + ssize_t n = 0; + { // lambda_param_with_default + NameDefaultPair* lambda_param_with_default_var; + while ( + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) + ) + { + res = lambda_param_with_default_var; + if (n == children_capacity) { + children_capacity *= 2; + children = PyMem_Realloc(children, children_capacity*sizeof(void *)); + if (!children) { + PyErr_Format(PyExc_MemoryError, "realloc None"); + return NULL; + } + } + children[n++] = res; + mark = p->mark; + } + p->mark = mark; + } + if (n == 0) { + PyMem_Free(children); + return NULL; + } + asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); + if (!seq) { + PyErr_Format(PyExc_MemoryError, 
"asdl_seq_new _loop1_84"); + PyMem_Free(children); + return NULL; + } + for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); + PyMem_Free(children); + _PyPegen_insert_memo(p, start_mark, _loop1_84_type, seq); + return seq; } -// _loop0_84: lambda_name_with_optional_default +// _loop0_85: lambda_param_no_default static asdl_seq * -_loop0_84_rule(Parser *p) +_loop0_85_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -14179,13 +14458,13 @@ _loop0_84_rule(Parser *p) } ssize_t children_capacity = 1; ssize_t n = 0; - { // lambda_name_with_optional_default - NameDefaultPair* lambda_name_with_optional_default_var; + { // lambda_param_no_default + arg_ty lambda_param_no_default_var; while ( - (lambda_name_with_optional_default_var = lambda_name_with_optional_default_rule(p)) + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) ) { - res = lambda_name_with_optional_default_var; + res = lambda_param_no_default_var; if (n == children_capacity) { children_capacity *= 2; children = PyMem_Realloc(children, children_capacity*sizeof(void *)); @@ -14201,49 +14480,17 @@ _loop0_84_rule(Parser *p) } asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); if (!seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_84"); + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_85"); PyMem_Free(children); return NULL; } for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_84_type, seq); + _PyPegen_insert_memo(p, start_mark, _loop0_85_type, seq); return seq; } -// _tmp_85: ',' lambda_kwds -static void * -_tmp_85_rule(Parser *p) -{ - if (p->error_indicator) { - return NULL; - } - void * res = NULL; - int mark = p->mark; - { // ',' lambda_kwds - arg_ty d; - Token * literal; - if ( - (literal = _PyPegen_expect_token(p, 12)) - && - (d = lambda_kwds_rule(p)) - ) - { - res = d; - if (res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - return NULL; - } - goto done; - } - p->mark = 
mark; - } - res = NULL; - done: - return res; -} - -// _loop1_86: lambda_name_with_optional_default +// _loop1_86: lambda_param_with_default static asdl_seq * _loop1_86_rule(Parser *p) { @@ -14260,13 +14507,13 @@ _loop1_86_rule(Parser *p) } ssize_t children_capacity = 1; ssize_t n = 0; - { // lambda_name_with_optional_default - NameDefaultPair* lambda_name_with_optional_default_var; + { // lambda_param_with_default + NameDefaultPair* lambda_param_with_default_var; while ( - (lambda_name_with_optional_default_var = lambda_name_with_optional_default_rule(p)) + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) ) { - res = lambda_name_with_optional_default_var; + res = lambda_param_with_default_var; if (n == children_capacity) { children_capacity *= 2; children = PyMem_Realloc(children, children_capacity*sizeof(void *)); @@ -14296,73 +14543,9 @@ _loop1_86_rule(Parser *p) return seq; } -// _tmp_87: ',' lambda_kwds -static void * -_tmp_87_rule(Parser *p) -{ - if (p->error_indicator) { - return NULL; - } - void * res = NULL; - int mark = p->mark; - { // ',' lambda_kwds - arg_ty d; - Token * literal; - if ( - (literal = _PyPegen_expect_token(p, 12)) - && - (d = lambda_kwds_rule(p)) - ) - { - res = d; - if (res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - return NULL; - } - goto done; - } - p->mark = mark; - } - res = NULL; - done: - return res; -} - -// _tmp_88: '=' expression -static void * -_tmp_88_rule(Parser *p) -{ - if (p->error_indicator) { - return NULL; - } - void * res = NULL; - int mark = p->mark; - { // '=' expression - expr_ty e; - Token * literal; - if ( - (literal = _PyPegen_expect_token(p, 22)) - && - (e = expression_rule(p)) - ) - { - res = e; - if (res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - return NULL; - } - goto done; - } - p->mark = mark; - } - res = NULL; - done: - return res; -} - -// _loop0_90: ',' lambda_name_with_default +// _loop0_87: lambda_param_maybe_default static asdl_seq * 
-_loop0_90_rule(Parser *p) +_loop0_87_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -14377,21 +14560,13 @@ _loop0_90_rule(Parser *p) } ssize_t children_capacity = 1; ssize_t n = 0; - { // ',' lambda_name_with_default - NameDefaultPair* elem; - Token * literal; + { // lambda_param_maybe_default + NameDefaultPair* lambda_param_maybe_default_var; while ( - (literal = _PyPegen_expect_token(p, 12)) - && - (elem = lambda_name_with_default_rule(p)) + (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - PyMem_Free(children); - return NULL; - } + res = lambda_param_maybe_default_var; if (n == children_capacity) { children_capacity *= 2; children = PyMem_Realloc(children, children_capacity*sizeof(void *)); @@ -14407,47 +14582,19 @@ _loop0_90_rule(Parser *p) } asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); if (!seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_90"); + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_87"); PyMem_Free(children); return NULL; } for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_90_type, seq); + _PyPegen_insert_memo(p, start_mark, _loop0_87_type, seq); return seq; } -// _gather_89: lambda_name_with_default _loop0_90 -static asdl_seq * -_gather_89_rule(Parser *p) -{ - if (p->error_indicator) { - return NULL; - } - asdl_seq * res = NULL; - int mark = p->mark; - { // lambda_name_with_default _loop0_90 - NameDefaultPair* elem; - asdl_seq * seq; - if ( - (elem = lambda_name_with_default_rule(p)) - && - (seq = _loop0_90_rule(p)) - ) - { - res = _PyPegen_seq_insert_in_front(p, elem, seq); - goto done; - } - p->mark = mark; - } - res = NULL; - done: - return res; -} - -// _loop0_92: ',' (lambda_plain_name !'=') +// _loop1_88: lambda_param_maybe_default static asdl_seq * -_loop0_92_rule(Parser *p) +_loop1_88_rule(Parser *p) { if 
(p->error_indicator) { return NULL; @@ -14462,21 +14609,13 @@ _loop0_92_rule(Parser *p) } ssize_t children_capacity = 1; ssize_t n = 0; - { // ',' (lambda_plain_name !'=') - void *elem; - Token * literal; + { // lambda_param_maybe_default + NameDefaultPair* lambda_param_maybe_default_var; while ( - (literal = _PyPegen_expect_token(p, 12)) - && - (elem = _tmp_142_rule(p)) + (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - PyMem_Free(children); - return NULL; - } + res = lambda_param_maybe_default_var; if (n == children_capacity) { children_capacity *= 2; children = PyMem_Realloc(children, children_capacity*sizeof(void *)); @@ -14490,49 +14629,25 @@ _loop0_92_rule(Parser *p) } p->mark = mark; } + if (n == 0) { + PyMem_Free(children); + return NULL; + } asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); if (!seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_92"); + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_88"); PyMem_Free(children); return NULL; } for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_92_type, seq); + _PyPegen_insert_memo(p, start_mark, _loop1_88_type, seq); return seq; } -// _gather_91: (lambda_plain_name !'=') _loop0_92 -static asdl_seq * -_gather_91_rule(Parser *p) -{ - if (p->error_indicator) { - return NULL; - } - asdl_seq * res = NULL; - int mark = p->mark; - { // (lambda_plain_name !'=') _loop0_92 - void *elem; - asdl_seq * seq; - if ( - (elem = _tmp_142_rule(p)) - && - (seq = _loop0_92_rule(p)) - ) - { - res = _PyPegen_seq_insert_in_front(p, elem, seq); - goto done; - } - p->mark = mark; - } - res = NULL; - done: - return res; -} - -// _loop1_93: ('or' conjunction) +// _loop1_89: ('or' conjunction) static asdl_seq * -_loop1_93_rule(Parser *p) +_loop1_89_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -14548,12 +14663,12 @@ 
_loop1_93_rule(Parser *p) ssize_t children_capacity = 1; ssize_t n = 0; { // ('or' conjunction) - void *_tmp_143_var; + void *_tmp_138_var; while ( - (_tmp_143_var = _tmp_143_rule(p)) + (_tmp_138_var = _tmp_138_rule(p)) ) { - res = _tmp_143_var; + res = _tmp_138_var; if (n == children_capacity) { children_capacity *= 2; children = PyMem_Realloc(children, children_capacity*sizeof(void *)); @@ -14573,19 +14688,19 @@ _loop1_93_rule(Parser *p) } asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); if (!seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_93"); + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_89"); PyMem_Free(children); return NULL; } for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_93_type, seq); + _PyPegen_insert_memo(p, start_mark, _loop1_89_type, seq); return seq; } -// _loop1_94: ('and' inversion) +// _loop1_90: ('and' inversion) static asdl_seq * -_loop1_94_rule(Parser *p) +_loop1_90_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -14601,12 +14716,12 @@ _loop1_94_rule(Parser *p) ssize_t children_capacity = 1; ssize_t n = 0; { // ('and' inversion) - void *_tmp_144_var; + void *_tmp_139_var; while ( - (_tmp_144_var = _tmp_144_rule(p)) + (_tmp_139_var = _tmp_139_rule(p)) ) { - res = _tmp_144_var; + res = _tmp_139_var; if (n == children_capacity) { children_capacity *= 2; children = PyMem_Realloc(children, children_capacity*sizeof(void *)); @@ -14626,19 +14741,19 @@ _loop1_94_rule(Parser *p) } asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); if (!seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_94"); + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_90"); PyMem_Free(children); return NULL; } for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_94_type, seq); + _PyPegen_insert_memo(p, start_mark, _loop1_90_type, seq); return seq; } -// _loop1_95: 
compare_op_bitwise_or_pair +// _loop1_91: compare_op_bitwise_or_pair static asdl_seq * -_loop1_95_rule(Parser *p) +_loop1_91_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -14679,19 +14794,19 @@ _loop1_95_rule(Parser *p) } asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); if (!seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_95"); + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_91"); PyMem_Free(children); return NULL; } for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_95_type, seq); + _PyPegen_insert_memo(p, start_mark, _loop1_91_type, seq); return seq; } -// _tmp_96: '!=' +// _tmp_92: '!=' static void * -_tmp_96_rule(Parser *p) +_tmp_92_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -14718,9 +14833,9 @@ _tmp_96_rule(Parser *p) return res; } -// _loop0_98: ',' slice +// _loop0_94: ',' slice static asdl_seq * -_loop0_98_rule(Parser *p) +_loop0_94_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -14765,32 +14880,32 @@ _loop0_98_rule(Parser *p) } asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); if (!seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_98"); + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_94"); PyMem_Free(children); return NULL; } for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_98_type, seq); + _PyPegen_insert_memo(p, start_mark, _loop0_94_type, seq); return seq; } -// _gather_97: slice _loop0_98 +// _gather_93: slice _loop0_94 static asdl_seq * -_gather_97_rule(Parser *p) +_gather_93_rule(Parser *p) { if (p->error_indicator) { return NULL; } asdl_seq * res = NULL; int mark = p->mark; - { // slice _loop0_98 + { // slice _loop0_94 expr_ty elem; asdl_seq * seq; if ( (elem = slice_rule(p)) && - (seq = _loop0_98_rule(p)) + (seq = _loop0_94_rule(p)) ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -14803,9 +14918,9 @@ 
_gather_97_rule(Parser *p) return res; } -// _tmp_99: ':' expression? +// _tmp_95: ':' expression? static void * -_tmp_99_rule(Parser *p) +_tmp_95_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -14835,9 +14950,9 @@ _tmp_99_rule(Parser *p) return res; } -// _tmp_100: tuple | group | genexp +// _tmp_96: tuple | group | genexp static void * -_tmp_100_rule(Parser *p) +_tmp_96_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -14882,9 +14997,9 @@ _tmp_100_rule(Parser *p) return res; } -// _tmp_101: list | listcomp +// _tmp_97: list | listcomp static void * -_tmp_101_rule(Parser *p) +_tmp_97_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -14918,9 +15033,9 @@ _tmp_101_rule(Parser *p) return res; } -// _tmp_102: dict | set | dictcomp | setcomp +// _tmp_98: dict | set | dictcomp | setcomp static void * -_tmp_102_rule(Parser *p) +_tmp_98_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -14976,9 +15091,9 @@ _tmp_102_rule(Parser *p) return res; } -// _loop1_103: STRING +// _loop1_99: STRING static asdl_seq * -_loop1_103_rule(Parser *p) +_loop1_99_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -15019,19 +15134,19 @@ _loop1_103_rule(Parser *p) } asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); if (!seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_103"); + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_99"); PyMem_Free(children); return NULL; } for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_103_type, seq); + _PyPegen_insert_memo(p, start_mark, _loop1_99_type, seq); return seq; } -// _tmp_104: star_named_expression ',' star_named_expressions? +// _tmp_100: star_named_expression ',' star_named_expressions? 
static void * -_tmp_104_rule(Parser *p) +_tmp_100_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -15064,9 +15179,9 @@ _tmp_104_rule(Parser *p) return res; } -// _tmp_105: yield_expr | named_expression +// _tmp_101: yield_expr | named_expression static void * -_tmp_105_rule(Parser *p) +_tmp_101_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -15100,9 +15215,9 @@ _tmp_105_rule(Parser *p) return res; } -// _loop0_107: ',' kvpair +// _loop0_103: ',' kvpair static asdl_seq * -_loop0_107_rule(Parser *p) +_loop0_103_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -15147,32 +15262,32 @@ _loop0_107_rule(Parser *p) } asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); if (!seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_107"); + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_103"); PyMem_Free(children); return NULL; } for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_107_type, seq); + _PyPegen_insert_memo(p, start_mark, _loop0_103_type, seq); return seq; } -// _gather_106: kvpair _loop0_107 +// _gather_102: kvpair _loop0_103 static asdl_seq * -_gather_106_rule(Parser *p) +_gather_102_rule(Parser *p) { if (p->error_indicator) { return NULL; } asdl_seq * res = NULL; int mark = p->mark; - { // kvpair _loop0_107 + { // kvpair _loop0_103 KeyValuePair* elem; asdl_seq * seq; if ( (elem = kvpair_rule(p)) && - (seq = _loop0_107_rule(p)) + (seq = _loop0_103_rule(p)) ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -15185,9 +15300,9 @@ _gather_106_rule(Parser *p) return res; } -// _loop1_108: for_if_clause +// _loop1_104: for_if_clause static asdl_seq * -_loop1_108_rule(Parser *p) +_loop1_104_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -15228,19 +15343,19 @@ _loop1_108_rule(Parser *p) } asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); if (!seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_108"); + 
PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_104"); PyMem_Free(children); return NULL; } for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_108_type, seq); + _PyPegen_insert_memo(p, start_mark, _loop1_104_type, seq); return seq; } -// _loop0_109: ('if' disjunction) +// _loop0_105: ('if' disjunction) static asdl_seq * -_loop0_109_rule(Parser *p) +_loop0_105_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -15256,12 +15371,12 @@ _loop0_109_rule(Parser *p) ssize_t children_capacity = 1; ssize_t n = 0; { // ('if' disjunction) - void *_tmp_145_var; + void *_tmp_140_var; while ( - (_tmp_145_var = _tmp_145_rule(p)) + (_tmp_140_var = _tmp_140_rule(p)) ) { - res = _tmp_145_var; + res = _tmp_140_var; if (n == children_capacity) { children_capacity *= 2; children = PyMem_Realloc(children, children_capacity*sizeof(void *)); @@ -15277,19 +15392,19 @@ _loop0_109_rule(Parser *p) } asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); if (!seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_109"); + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_105"); PyMem_Free(children); return NULL; } for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_109_type, seq); + _PyPegen_insert_memo(p, start_mark, _loop0_105_type, seq); return seq; } -// _loop0_110: ('if' disjunction) +// _loop0_106: ('if' disjunction) static asdl_seq * -_loop0_110_rule(Parser *p) +_loop0_106_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -15305,12 +15420,12 @@ _loop0_110_rule(Parser *p) ssize_t children_capacity = 1; ssize_t n = 0; { // ('if' disjunction) - void *_tmp_146_var; + void *_tmp_141_var; while ( - (_tmp_146_var = _tmp_146_rule(p)) + (_tmp_141_var = _tmp_141_rule(p)) ) { - res = _tmp_146_var; + res = _tmp_141_var; if (n == children_capacity) { children_capacity *= 2; children = PyMem_Realloc(children, 
children_capacity*sizeof(void *)); @@ -15326,19 +15441,19 @@ _loop0_110_rule(Parser *p) } asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); if (!seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_110"); + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_106"); PyMem_Free(children); return NULL; } for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_110_type, seq); + _PyPegen_insert_memo(p, start_mark, _loop0_106_type, seq); return seq; } -// _tmp_111: ',' args +// _tmp_107: ',' args static void * -_tmp_111_rule(Parser *p) +_tmp_107_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -15368,9 +15483,9 @@ _tmp_111_rule(Parser *p) return res; } -// _tmp_112: ',' args +// _tmp_108: ',' args static void * -_tmp_112_rule(Parser *p) +_tmp_108_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -15400,9 +15515,9 @@ _tmp_112_rule(Parser *p) return res; } -// _loop0_114: ',' kwarg_or_starred +// _loop0_110: ',' kwarg_or_starred static asdl_seq * -_loop0_114_rule(Parser *p) +_loop0_110_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -15447,32 +15562,32 @@ _loop0_114_rule(Parser *p) } asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); if (!seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_114"); + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_110"); PyMem_Free(children); return NULL; } for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_114_type, seq); + _PyPegen_insert_memo(p, start_mark, _loop0_110_type, seq); return seq; } -// _gather_113: kwarg_or_starred _loop0_114 +// _gather_109: kwarg_or_starred _loop0_110 static asdl_seq * -_gather_113_rule(Parser *p) +_gather_109_rule(Parser *p) { if (p->error_indicator) { return NULL; } asdl_seq * res = NULL; int mark = p->mark; - { // kwarg_or_starred _loop0_114 + { // kwarg_or_starred _loop0_110 KeywordOrStarred* elem; 
asdl_seq * seq; if ( (elem = kwarg_or_starred_rule(p)) && - (seq = _loop0_114_rule(p)) + (seq = _loop0_110_rule(p)) ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -15485,9 +15600,9 @@ _gather_113_rule(Parser *p) return res; } -// _loop0_116: ',' kwarg_or_double_starred +// _loop0_112: ',' kwarg_or_double_starred static asdl_seq * -_loop0_116_rule(Parser *p) +_loop0_112_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -15532,32 +15647,32 @@ _loop0_116_rule(Parser *p) } asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); if (!seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_116"); + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_112"); PyMem_Free(children); return NULL; } for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_116_type, seq); + _PyPegen_insert_memo(p, start_mark, _loop0_112_type, seq); return seq; } -// _gather_115: kwarg_or_double_starred _loop0_116 +// _gather_111: kwarg_or_double_starred _loop0_112 static asdl_seq * -_gather_115_rule(Parser *p) +_gather_111_rule(Parser *p) { if (p->error_indicator) { return NULL; } asdl_seq * res = NULL; int mark = p->mark; - { // kwarg_or_double_starred _loop0_116 + { // kwarg_or_double_starred _loop0_112 KeywordOrStarred* elem; asdl_seq * seq; if ( (elem = kwarg_or_double_starred_rule(p)) && - (seq = _loop0_116_rule(p)) + (seq = _loop0_112_rule(p)) ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -15570,9 +15685,9 @@ _gather_115_rule(Parser *p) return res; } -// _loop0_118: ',' kwarg_or_starred +// _loop0_114: ',' kwarg_or_starred static asdl_seq * -_loop0_118_rule(Parser *p) +_loop0_114_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -15617,32 +15732,32 @@ _loop0_118_rule(Parser *p) } asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); if (!seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_118"); + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_114"); 
PyMem_Free(children); return NULL; } for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_118_type, seq); + _PyPegen_insert_memo(p, start_mark, _loop0_114_type, seq); return seq; } -// _gather_117: kwarg_or_starred _loop0_118 +// _gather_113: kwarg_or_starred _loop0_114 static asdl_seq * -_gather_117_rule(Parser *p) +_gather_113_rule(Parser *p) { if (p->error_indicator) { return NULL; } asdl_seq * res = NULL; int mark = p->mark; - { // kwarg_or_starred _loop0_118 + { // kwarg_or_starred _loop0_114 KeywordOrStarred* elem; asdl_seq * seq; if ( (elem = kwarg_or_starred_rule(p)) && - (seq = _loop0_118_rule(p)) + (seq = _loop0_114_rule(p)) ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -15655,9 +15770,9 @@ _gather_117_rule(Parser *p) return res; } -// _loop0_120: ',' kwarg_or_double_starred +// _loop0_116: ',' kwarg_or_double_starred static asdl_seq * -_loop0_120_rule(Parser *p) +_loop0_116_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -15702,32 +15817,32 @@ _loop0_120_rule(Parser *p) } asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); if (!seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_120"); + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_116"); PyMem_Free(children); return NULL; } for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_120_type, seq); + _PyPegen_insert_memo(p, start_mark, _loop0_116_type, seq); return seq; } -// _gather_119: kwarg_or_double_starred _loop0_120 +// _gather_115: kwarg_or_double_starred _loop0_116 static asdl_seq * -_gather_119_rule(Parser *p) +_gather_115_rule(Parser *p) { if (p->error_indicator) { return NULL; } asdl_seq * res = NULL; int mark = p->mark; - { // kwarg_or_double_starred _loop0_120 + { // kwarg_or_double_starred _loop0_116 KeywordOrStarred* elem; asdl_seq * seq; if ( (elem = kwarg_or_double_starred_rule(p)) && - (seq = 
_loop0_120_rule(p)) + (seq = _loop0_116_rule(p)) ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -15740,9 +15855,9 @@ _gather_119_rule(Parser *p) return res; } -// _loop0_121: (',' star_target) +// _loop0_117: (',' star_target) static asdl_seq * -_loop0_121_rule(Parser *p) +_loop0_117_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -15758,12 +15873,12 @@ _loop0_121_rule(Parser *p) ssize_t children_capacity = 1; ssize_t n = 0; { // (',' star_target) - void *_tmp_147_var; + void *_tmp_142_var; while ( - (_tmp_147_var = _tmp_147_rule(p)) + (_tmp_142_var = _tmp_142_rule(p)) ) { - res = _tmp_147_var; + res = _tmp_142_var; if (n == children_capacity) { children_capacity *= 2; children = PyMem_Realloc(children, children_capacity*sizeof(void *)); @@ -15779,19 +15894,19 @@ _loop0_121_rule(Parser *p) } asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); if (!seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_121"); + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_117"); PyMem_Free(children); return NULL; } for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_121_type, seq); + _PyPegen_insert_memo(p, start_mark, _loop0_117_type, seq); return seq; } -// _loop0_123: ',' star_target +// _loop0_119: ',' star_target static asdl_seq * -_loop0_123_rule(Parser *p) +_loop0_119_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -15836,32 +15951,32 @@ _loop0_123_rule(Parser *p) } asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); if (!seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_123"); + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_119"); PyMem_Free(children); return NULL; } for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_123_type, seq); + _PyPegen_insert_memo(p, start_mark, _loop0_119_type, seq); return seq; } -// _gather_122: star_target _loop0_123 +// _gather_118: 
star_target _loop0_119 static asdl_seq * -_gather_122_rule(Parser *p) +_gather_118_rule(Parser *p) { if (p->error_indicator) { return NULL; } asdl_seq * res = NULL; int mark = p->mark; - { // star_target _loop0_123 + { // star_target _loop0_119 expr_ty elem; asdl_seq * seq; if ( (elem = star_target_rule(p)) && - (seq = _loop0_123_rule(p)) + (seq = _loop0_119_rule(p)) ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -15874,9 +15989,9 @@ _gather_122_rule(Parser *p) return res; } -// _tmp_124: !'*' star_target +// _tmp_120: !'*' star_target static void * -_tmp_124_rule(Parser *p) +_tmp_120_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -15901,9 +16016,9 @@ _tmp_124_rule(Parser *p) return res; } -// _loop0_126: ',' del_target +// _loop0_122: ',' del_target static asdl_seq * -_loop0_126_rule(Parser *p) +_loop0_122_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -15948,32 +16063,32 @@ _loop0_126_rule(Parser *p) } asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); if (!seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_126"); + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_122"); PyMem_Free(children); return NULL; } for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_126_type, seq); + _PyPegen_insert_memo(p, start_mark, _loop0_122_type, seq); return seq; } -// _gather_125: del_target _loop0_126 +// _gather_121: del_target _loop0_122 static asdl_seq * -_gather_125_rule(Parser *p) +_gather_121_rule(Parser *p) { if (p->error_indicator) { return NULL; } asdl_seq * res = NULL; int mark = p->mark; - { // del_target _loop0_126 + { // del_target _loop0_122 expr_ty elem; asdl_seq * seq; if ( (elem = del_target_rule(p)) && - (seq = _loop0_126_rule(p)) + (seq = _loop0_122_rule(p)) ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -15986,9 +16101,9 @@ _gather_125_rule(Parser *p) return res; } -// _loop0_128: ',' target +// _loop0_124: ',' target static 
asdl_seq * -_loop0_128_rule(Parser *p) +_loop0_124_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16033,32 +16148,32 @@ _loop0_128_rule(Parser *p) } asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); if (!seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_128"); + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_124"); PyMem_Free(children); return NULL; } for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_128_type, seq); + _PyPegen_insert_memo(p, start_mark, _loop0_124_type, seq); return seq; } -// _gather_127: target _loop0_128 +// _gather_123: target _loop0_124 static asdl_seq * -_gather_127_rule(Parser *p) +_gather_123_rule(Parser *p) { if (p->error_indicator) { return NULL; } asdl_seq * res = NULL; int mark = p->mark; - { // target _loop0_128 + { // target _loop0_124 expr_ty elem; asdl_seq * seq; if ( (elem = target_rule(p)) && - (seq = _loop0_128_rule(p)) + (seq = _loop0_124_rule(p)) ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -16071,9 +16186,9 @@ _gather_127_rule(Parser *p) return res; } -// _tmp_129: args | expression for_if_clauses +// _tmp_125: args | expression for_if_clauses static void * -_tmp_129_rule(Parser *p) +_tmp_125_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16110,9 +16225,9 @@ _tmp_129_rule(Parser *p) return res; } -// _tmp_130: '=' annotated_rhs +// _tmp_126: '=' annotated_rhs static void * -_tmp_130_rule(Parser *p) +_tmp_126_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16138,9 +16253,9 @@ _tmp_130_rule(Parser *p) return res; } -// _tmp_131: '=' | augassign +// _tmp_127: '=' | augassign static void * -_tmp_131_rule(Parser *p) +_tmp_127_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16174,9 +16289,9 @@ _tmp_131_rule(Parser *p) return res; } -// _tmp_132: yield_expr | star_expressions +// _tmp_128: yield_expr | star_expressions static void * -_tmp_132_rule(Parser *p) 
+_tmp_128_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16210,9 +16325,9 @@ _tmp_132_rule(Parser *p) return res; } -// _tmp_133: '[' | '(' | '{' +// _tmp_129: '[' | '(' | '{' static void * -_tmp_133_rule(Parser *p) +_tmp_129_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16257,9 +16372,9 @@ _tmp_133_rule(Parser *p) return res; } -// _loop0_134: param_no_default +// _loop0_130: param_no_default static asdl_seq * -_loop0_134_rule(Parser *p) +_loop0_130_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16296,19 +16411,19 @@ _loop0_134_rule(Parser *p) } asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); if (!seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_134"); + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_130"); PyMem_Free(children); return NULL; } for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_134_type, seq); + _PyPegen_insert_memo(p, start_mark, _loop0_130_type, seq); return seq; } -// _tmp_135: slash_with_default | param_with_default+ +// _tmp_131: slash_with_default | param_with_default+ static void * -_tmp_135_rule(Parser *p) +_tmp_131_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16327,12 +16442,12 @@ _tmp_135_rule(Parser *p) p->mark = mark; } { // param_with_default+ - asdl_seq * _loop1_148_var; + asdl_seq * _loop1_143_var; if ( - (_loop1_148_var = _loop1_148_rule(p)) + (_loop1_143_var = _loop1_143_rule(p)) ) { - res = _loop1_148_var; + res = _loop1_143_var; goto done; } p->mark = mark; @@ -16342,9 +16457,9 @@ _tmp_135_rule(Parser *p) return res; } -// _tmp_136: star_targets '=' +// _tmp_132: star_targets '=' static void * -_tmp_136_rule(Parser *p) +_tmp_132_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16374,9 +16489,9 @@ _tmp_136_rule(Parser *p) return res; } -// _tmp_137: '.' | '...' +// _tmp_133: '.' | '...' 
static void * -_tmp_137_rule(Parser *p) +_tmp_133_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16410,9 +16525,9 @@ _tmp_137_rule(Parser *p) return res; } -// _tmp_138: '.' | '...' +// _tmp_134: '.' | '...' static void * -_tmp_138_rule(Parser *p) +_tmp_134_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16446,9 +16561,9 @@ _tmp_138_rule(Parser *p) return res; } -// _tmp_139: '@' named_expression NEWLINE +// _tmp_135: '@' named_expression NEWLINE static void * -_tmp_139_rule(Parser *p) +_tmp_135_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16481,9 +16596,9 @@ _tmp_139_rule(Parser *p) return res; } -// _tmp_140: ',' star_expression +// _tmp_136: ',' star_expression static void * -_tmp_140_rule(Parser *p) +_tmp_136_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16513,9 +16628,9 @@ _tmp_140_rule(Parser *p) return res; } -// _tmp_141: ',' expression +// _tmp_137: ',' expression static void * -_tmp_141_rule(Parser *p) +_tmp_137_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16545,36 +16660,9 @@ _tmp_141_rule(Parser *p) return res; } -// _tmp_142: lambda_plain_name !'=' -static void * -_tmp_142_rule(Parser *p) -{ - if (p->error_indicator) { - return NULL; - } - void * res = NULL; - int mark = p->mark; - { // lambda_plain_name !'=' - arg_ty lambda_plain_name_var; - if ( - (lambda_plain_name_var = lambda_plain_name_rule(p)) - && - _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 22) - ) - { - res = lambda_plain_name_var; - goto done; - } - p->mark = mark; - } - res = NULL; - done: - return res; -} - -// _tmp_143: 'or' conjunction +// _tmp_138: 'or' conjunction static void * -_tmp_143_rule(Parser *p) +_tmp_138_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16604,9 +16692,9 @@ _tmp_143_rule(Parser *p) return res; } -// _tmp_144: 'and' inversion +// _tmp_139: 'and' inversion static void * -_tmp_144_rule(Parser *p) +_tmp_139_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ 
-16636,9 +16724,9 @@ _tmp_144_rule(Parser *p) return res; } -// _tmp_145: 'if' disjunction +// _tmp_140: 'if' disjunction static void * -_tmp_145_rule(Parser *p) +_tmp_140_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16668,9 +16756,9 @@ _tmp_145_rule(Parser *p) return res; } -// _tmp_146: 'if' disjunction +// _tmp_141: 'if' disjunction static void * -_tmp_146_rule(Parser *p) +_tmp_141_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16700,9 +16788,9 @@ _tmp_146_rule(Parser *p) return res; } -// _tmp_147: ',' star_target +// _tmp_142: ',' star_target static void * -_tmp_147_rule(Parser *p) +_tmp_142_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16732,9 +16820,9 @@ _tmp_147_rule(Parser *p) return res; } -// _loop1_148: param_with_default +// _loop1_143: param_with_default static asdl_seq * -_loop1_148_rule(Parser *p) +_loop1_143_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16775,13 +16863,13 @@ _loop1_148_rule(Parser *p) } asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); if (!seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_148"); + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_143"); PyMem_Free(children); return NULL; } for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_148_type, seq); + _PyPegen_insert_memo(p, start_mark, _loop1_143_type, seq); return seq; } From webhook-mailer at python.org Fri May 1 12:42:36 2020 From: webhook-mailer at python.org (Guido van Rossum) Date: Fri, 01 May 2020 16:42:36 -0000 Subject: [Python-checkins] Ensure that tok->type_comments is set on every path (GH-19828) Message-ID: https://github.com/python/cpython/commit/d9d6eadf003605f4cdb55e38df2168dd1bc0dbd5 commit: d9d6eadf003605f4cdb55e38df2168dd1bc0dbd5 branch: master author: Guido van Rossum committer: GitHub date: 2020-05-01T17:42:32+01:00 summary: Ensure that tok->type_comments is set on every path (GH-19828) files: M 
Parser/pegen/pegen.c diff --git a/Parser/pegen/pegen.c b/Parser/pegen/pegen.c index a7add8fbb144e..6ff09b3b31f78 100644 --- a/Parser/pegen/pegen.c +++ b/Parser/pegen/pegen.c @@ -1017,6 +1017,8 @@ _PyPegen_Parser_New(struct tok_state *tok, int start_rule, int flags, return (Parser *) PyErr_NoMemory(); } assert(tok != NULL); + tok->type_comments = (flags & PyPARSE_TYPE_COMMENTS) > 0; + tok->async_hacks = (flags & PyPARSE_ASYNC_HACKS) > 0; p->tok = tok; p->keywords = NULL; p->n_keyword_lists = -1; @@ -1172,9 +1174,6 @@ _PyPegen_run_parser_from_string(const char *str, int start_rule, PyObject *filen int parser_flags = compute_parser_flags(flags); int feature_version = flags ? flags->cf_feature_version : PY_MINOR_VERSION; - tok->type_comments = (parser_flags & PyPARSE_TYPE_COMMENTS) > 0; - tok->async_hacks = (parser_flags & PyPARSE_ASYNC_HACKS) > 0; - Parser *p = _PyPegen_Parser_New(tok, start_rule, parser_flags, feature_version, NULL, arena); if (p == NULL) { From webhook-mailer at python.org Fri May 1 13:31:00 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Fri, 01 May 2020 17:31:00 -0000 Subject: [Python-checkins] bpo-40334: Make the PyPegen* and PyParser* APIs more consistent (GH-19839) Message-ID: https://github.com/python/cpython/commit/03b7642265e65f198682f22648dbe6cf4fff9835 commit: 03b7642265e65f198682f22648dbe6cf4fff9835 branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-05-01T18:30:51+01:00 summary: bpo-40334: Make the PyPegen* and PyParser* APIs more consistent (GH-19839) This commit makes both APIs more consistent by doing the following: - Remove the `PyPegen_CodeObjectFrom*` functions, which weren't used and will probably not be needed. Functions like `Py_CompileStringObject` can be used instead. - Include a `const char *filename` parameter in `PyPegen_ASTFromString`. - Rename `PyPegen_ASTFromFile` to `PyPegen_ASTFromFilename`, because its signature is not the same with `PyParser_ASTFromFile`. 
files: M Include/internal/pegen_interface.h M Modules/_peg_parser.c M Parser/pegen/peg_api.c diff --git a/Include/internal/pegen_interface.h b/Include/internal/pegen_interface.h index adff7315681e3..ee4c77ec00676 100644 --- a/Include/internal/pegen_interface.h +++ b/Include/internal/pegen_interface.h @@ -11,25 +11,34 @@ extern "C" { #include "Python.h" #include "Python-ast.h" -PyAPI_FUNC(mod_ty) PyPegen_ASTFromFile(const char *filename, int mode, PyCompilerFlags*, PyArena *arena); -PyAPI_FUNC(mod_ty) PyPegen_ASTFromString(const char *str, int mode, PyCompilerFlags *flags, - PyArena *arena); -PyAPI_FUNC(mod_ty) PyPegen_ASTFromStringObject(const char *str, PyObject* filename, int mode, - PyCompilerFlags *flags, PyArena *arena); -PyAPI_FUNC(mod_ty) PyPegen_ASTFromFileObject(FILE *fp, PyObject *filename_ob, - int mode, const char *enc, const char *ps1, - const char *ps2, PyCompilerFlags *flags, - int *errcode, PyArena *arena); -PyAPI_FUNC(PyCodeObject *) PyPegen_CodeObjectFromFile(const char *filename, int mode, PyCompilerFlags *flags); -PyAPI_FUNC(PyCodeObject *) PyPegen_CodeObjectFromString(const char *str, int mode, - PyCompilerFlags *flags); -PyAPI_FUNC(PyCodeObject *) PyPegen_CodeObjectFromFileObject(FILE *, PyObject *filename_ob, - int mode, - const char *ps1, - const char *ps2, - PyCompilerFlags *flags, - const char *enc, - int *errcode); +PyAPI_FUNC(mod_ty) PyPegen_ASTFromString( + const char *str, + const char *filename, + int mode, + PyCompilerFlags *flags, + PyArena *arena); +PyAPI_FUNC(mod_ty) PyPegen_ASTFromStringObject( + const char *str, + PyObject* filename, + int mode, + PyCompilerFlags *flags, + PyArena *arena); +PyAPI_FUNC(mod_ty) PyPegen_ASTFromFileObject( + FILE *fp, + PyObject *filename_ob, + int mode, + const char *enc, + const char *ps1, + const char *ps2, + PyCompilerFlags *flags, + int *errcode, + PyArena *arena); +PyAPI_FUNC(mod_ty) PyPegen_ASTFromFilename( + const char *filename, + int mode, + PyCompilerFlags *flags, + PyArena *arena); + 
#ifdef __cplusplus } diff --git a/Modules/_peg_parser.c b/Modules/_peg_parser.c index 59b80f9e06e9e..3b27b2c9cbaa2 100644 --- a/Modules/_peg_parser.c +++ b/Modules/_peg_parser.c @@ -31,7 +31,7 @@ _Py_parse_file(PyObject *self, PyObject *args, PyObject *kwds) PyCompilerFlags flags = _PyCompilerFlags_INIT; PyObject *result = NULL; - mod_ty res = PyPegen_ASTFromFile(filename, mode, &flags, arena); + mod_ty res = PyPegen_ASTFromFilename(filename, mode, &flags, arena); if (res == NULL) { goto error; } @@ -84,7 +84,7 @@ _Py_parse_string(PyObject *self, PyObject *args, PyObject *kwds) res = PyParser_ASTFromString(the_string, "", mode, &flags, arena); } else { - res = PyPegen_ASTFromString(the_string, mode, &flags, arena); + res = PyPegen_ASTFromString(the_string, "", mode, &flags, arena); } if (res == NULL) { goto error; diff --git a/Parser/pegen/peg_api.c b/Parser/pegen/peg_api.c index 31ac2e1399265..5e71ecdb13cf0 100644 --- a/Parser/pegen/peg_api.c +++ b/Parser/pegen/peg_api.c @@ -4,9 +4,10 @@ #include "pegen.h" mod_ty -PyPegen_ASTFromString(const char *str, int mode, PyCompilerFlags *flags, PyArena *arena) +PyPegen_ASTFromString(const char *str, const char *filename, int mode, + PyCompilerFlags *flags, PyArena *arena) { - PyObject *filename_ob = PyUnicode_FromString(""); + PyObject *filename_ob = PyUnicode_FromString(filename); if (filename_ob == NULL) { return NULL; } @@ -16,7 +17,8 @@ PyPegen_ASTFromString(const char *str, int mode, PyCompilerFlags *flags, PyArena } mod_ty -PyPegen_ASTFromStringObject(const char *str, PyObject* filename, int mode, PyCompilerFlags *flags, PyArena *arena) +PyPegen_ASTFromStringObject(const char *str, PyObject* filename, int mode, + PyCompilerFlags *flags, PyArena *arena) { if (PySys_Audit("compile", "yO", str, filename) < 0) { return NULL; @@ -27,7 +29,7 @@ PyPegen_ASTFromStringObject(const char *str, PyObject* filename, int mode, PyCom } mod_ty -PyPegen_ASTFromFile(const char *filename, int mode, PyCompilerFlags *flags, PyArena 
*arena) +PyPegen_ASTFromFilename(const char *filename, int mode, PyCompilerFlags *flags, PyArena *arena) { PyObject *filename_ob = PyUnicode_FromString(filename); if (filename_ob == NULL) { @@ -50,84 +52,3 @@ PyPegen_ASTFromFileObject(FILE *fp, PyObject *filename_ob, int mode, return _PyPegen_run_parser_from_file_pointer(fp, mode, filename_ob, enc, ps1, ps2, flags, errcode, arena); } - -PyCodeObject * -PyPegen_CodeObjectFromString(const char *str, int mode, PyCompilerFlags *flags) -{ - PyArena *arena = PyArena_New(); - if (arena == NULL) { - return NULL; - } - - PyCodeObject *result = NULL; - - PyObject *filename_ob = PyUnicode_FromString(""); - if (filename_ob == NULL) { - goto error; - } - - mod_ty res = PyPegen_ASTFromString(str, mode, flags, arena); - if (res == NULL) { - goto error; - } - - result = PyAST_CompileObject(res, filename_ob, NULL, -1, arena); - -error: - Py_XDECREF(filename_ob); - PyArena_Free(arena); - return result; -} - -PyCodeObject * -PyPegen_CodeObjectFromFile(const char *filename, int mode, PyCompilerFlags* flags) -{ - PyArena *arena = PyArena_New(); - if (arena == NULL) { - return NULL; - } - - PyCodeObject *result = NULL; - - PyObject *filename_ob = PyUnicode_FromString(filename); - if (filename_ob == NULL) { - goto error; - } - - mod_ty res = PyPegen_ASTFromFile(filename, mode, flags, arena); - if (res == NULL) { - goto error; - } - - result = PyAST_CompileObject(res, filename_ob, NULL, -1, arena); - -error: - Py_XDECREF(filename_ob); - PyArena_Free(arena); - return result; -} - -PyCodeObject * -PyPegen_CodeObjectFromFileObject(FILE *fp, PyObject *filename_ob, int mode, - const char *ps1, const char *ps2, - PyCompilerFlags *flags, const char *enc, int *errcode) -{ - PyArena *arena = PyArena_New(); - if (arena == NULL) { - return NULL; - } - - PyCodeObject *result = NULL; - - mod_ty res = PyPegen_ASTFromFileObject(fp, filename_ob, mode, enc, ps1, ps2, - flags, errcode, arena); - if (res == NULL) { - goto error; - } - - result = 
PyAST_CompileObject(res, filename_ob, NULL, -1, arena); - -error: - PyArena_Free(arena); - return result; -} From webhook-mailer at python.org Fri May 1 13:34:28 2020 From: webhook-mailer at python.org (Raymond Hettinger) Date: Fri, 01 May 2020 17:34:28 -0000 Subject: [Python-checkins] Simplify choice()'s interaction with the private _randbelow() method (GH-19831) Message-ID: https://github.com/python/cpython/commit/4168f1e46041645cf54bd053981270d8c4c1313b commit: 4168f1e46041645cf54bd053981270d8c4c1313b branch: master author: Raymond Hettinger committer: GitHub date: 2020-05-01T10:34:19-07:00 summary: Simplify choice()'s interaction with the private _randbelow() method (GH-19831) files: M Lib/random.py M Lib/test/test_random.py diff --git a/Lib/random.py b/Lib/random.py index 80fe447db6c86..8f840e1abb908 100644 --- a/Lib/random.py +++ b/Lib/random.py @@ -265,10 +265,10 @@ def randint(self, a, b): return self.randrange(a, b+1) def _randbelow_with_getrandbits(self, n): - "Return a random int in the range [0,n). Raises ValueError if n==0." + "Return a random int in the range [0,n). Returns 0 if n==0." if not n: - raise ValueError("Boundary cannot be zero") + return 0 getrandbits = self.getrandbits k = n.bit_length() # don't use (n-1) here because n can be 1 r = getrandbits(k) # 0 <= r < 2**k @@ -277,7 +277,7 @@ def _randbelow_with_getrandbits(self, n): return r def _randbelow_without_getrandbits(self, n, int=int, maxsize=1< https://github.com/python/cpython/commit/831d58d7865cb98fa09227dc614f4f3ce6af968b commit: 831d58d7865cb98fa09227dc614f4f3ce6af968b branch: master author: Shantanu committer: GitHub date: 2020-05-01T18:52:10+01:00 summary: bpo-39691: Clarify io.open_code behavior (GH-19824) files: M Doc/library/io.rst diff --git a/Doc/library/io.rst b/Doc/library/io.rst index f0987da9b6a4c..aecbec56866d7 100644 --- a/Doc/library/io.rst +++ b/Doc/library/io.rst @@ -132,12 +132,13 @@ High-level Module Interface Opens the provided file with mode ``'rb'``. 
This function should be used when the intent is to treat the contents as executable code. - ``path`` should be an absolute path. + ``path`` should be a :class:`str` and an absolute path. The behavior of this function may be overridden by an earlier call to the - :c:func:`PyFile_SetOpenCodeHook`, however, it should always be considered - interchangeable with ``open(path, 'rb')``. Overriding the behavior is - intended for additional validation or preprocessing of the file. + :c:func:`PyFile_SetOpenCodeHook`. However, assuming that ``path`` is a + :class:`str` and an absolute path, ``open_code(path)`` should always behave + the same as ``open(path, 'rb')``. Overriding the behavior is intended for + additional validation or preprocessing of the file. .. versionadded:: 3.8 From webhook-mailer at python.org Fri May 1 14:04:33 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 01 May 2020 18:04:33 -0000 Subject: [Python-checkins] bpo-39691: Clarify io.open_code behavior (GH-19824) Message-ID: https://github.com/python/cpython/commit/c9d7d32b6dc6140f7fcbf1ae1120df6d59fc28d0 commit: c9d7d32b6dc6140f7fcbf1ae1120df6d59fc28d0 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-01T11:04:25-07:00 summary: bpo-39691: Clarify io.open_code behavior (GH-19824) (cherry picked from commit 831d58d7865cb98fa09227dc614f4f3ce6af968b) Co-authored-by: Shantanu files: M Doc/library/io.rst diff --git a/Doc/library/io.rst b/Doc/library/io.rst index 70e01153d4196..32151a0ace458 100644 --- a/Doc/library/io.rst +++ b/Doc/library/io.rst @@ -132,12 +132,13 @@ High-level Module Interface Opens the provided file with mode ``'rb'``. This function should be used when the intent is to treat the contents as executable code. - ``path`` should be an absolute path. + ``path`` should be a :class:`str` and an absolute path. 
The behavior of this function may be overridden by an earlier call to the - :c:func:`PyFile_SetOpenCodeHook`, however, it should always be considered - interchangeable with ``open(path, 'rb')``. Overriding the behavior is - intended for additional validation or preprocessing of the file. + :c:func:`PyFile_SetOpenCodeHook`. However, assuming that ``path`` is a + :class:`str` and an absolute path, ``open_code(path)`` should always behave + the same as ``open(path, 'rb')``. Overriding the behavior is intended for + additional validation or preprocessing of the file. .. versionadded:: 3.8 From webhook-mailer at python.org Fri May 1 14:07:58 2020 From: webhook-mailer at python.org (Gregory Szorc) Date: Fri, 01 May 2020 18:07:58 -0000 Subject: [Python-checkins] bpo-40412: Nullify inittab_copy during finalization (GH-19746) Message-ID: https://github.com/python/cpython/commit/64224a4727321a8dd33e6f769edda401193ebef0 commit: 64224a4727321a8dd33e6f769edda401193ebef0 branch: master author: Gregory Szorc committer: GitHub date: 2020-05-01T11:07:54-07:00 summary: bpo-40412: Nullify inittab_copy during finalization (GH-19746) Otherwise we leave a dangling pointer to free'd memory. If we then initialize a new interpreter in the same process and call PyImport_ExtendInittab, we will (likely) crash when calling PyMem_RawRealloc(inittab_copy, ...) since the pointer address is bogus. Automerge-Triggered-By: @brettcannon files: A Misc/NEWS.d/next/C API/2020-05-01-17-28-04.bpo-40412.dE0D8N.rst M Python/import.c diff --git a/Misc/NEWS.d/next/C API/2020-05-01-17-28-04.bpo-40412.dE0D8N.rst b/Misc/NEWS.d/next/C API/2020-05-01-17-28-04.bpo-40412.dE0D8N.rst new file mode 100644 index 0000000000000..92bfcddf115a6 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2020-05-01-17-28-04.bpo-40412.dE0D8N.rst @@ -0,0 +1 @@ +Nullify inittab_copy during finalization, preventing future interpreter initializations in an embedded situation from crashing. Patch by Gregory Szorc. 
diff --git a/Python/import.c b/Python/import.c index 8c94e0ec54655..400b02abbdba0 100644 --- a/Python/import.c +++ b/Python/import.c @@ -298,6 +298,7 @@ _PyImport_Fini2(void) /* Free memory allocated by PyImport_ExtendInittab() */ PyMem_RawFree(inittab_copy); + inittab_copy = NULL; PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &old_alloc); } From webhook-mailer at python.org Fri May 1 15:46:10 2020 From: webhook-mailer at python.org (Shantanu) Date: Fri, 01 May 2020 19:46:10 -0000 Subject: [Python-checkins] bpo-39435: Fix docs for pickle.loads (GH-18160) Message-ID: https://github.com/python/cpython/commit/289842ae820f99908d3a345f1f3b6d4e5b4b97fc commit: 289842ae820f99908d3a345f1f3b6d4e5b4b97fc branch: master author: Shantanu committer: GitHub date: 2020-05-01T12:46:01-07:00 summary: bpo-39435: Fix docs for pickle.loads (GH-18160) files: A Misc/NEWS.d/next/Documentation/2020-01-24-05-42-57.bpo-39435.EFcdFU.rst M Doc/library/pickle.rst M Misc/ACKS diff --git a/Doc/library/pickle.rst b/Doc/library/pickle.rst index a7b92bb9538d9..d92e947a76403 100644 --- a/Doc/library/pickle.rst +++ b/Doc/library/pickle.rst @@ -252,10 +252,10 @@ process more convenient: .. versionchanged:: 3.8 The *buffers* argument was added. -.. function:: loads(bytes_object, \*, fix_imports=True, encoding="ASCII", errors="strict", buffers=None) +.. function:: loads(data, \*, fix_imports=True, encoding="ASCII", errors="strict", buffers=None) Return the reconstituted object hierarchy of the pickled representation - *bytes_object* of an object. + *data* of an object. *data* must be a :term:`bytes-like object`. The protocol version of the pickle is detected automatically, so no protocol argument is needed. 
Bytes past the pickled representation diff --git a/Misc/ACKS b/Misc/ACKS index 21822dd7524cf..9221f6aae439e 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -790,6 +790,7 @@ Manuel Jacob David Jacobs Kevin Jacobs Kjetil Jacobsen +Shantanu Jain Bertrand Janin Geert Jansen Jack Jansen diff --git a/Misc/NEWS.d/next/Documentation/2020-01-24-05-42-57.bpo-39435.EFcdFU.rst b/Misc/NEWS.d/next/Documentation/2020-01-24-05-42-57.bpo-39435.EFcdFU.rst new file mode 100644 index 0000000000000..40294c10df00a --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2020-01-24-05-42-57.bpo-39435.EFcdFU.rst @@ -0,0 +1 @@ +Fix an incorrect signature for :func:`pickle.loads` in the docs \ No newline at end of file From webhook-mailer at python.org Fri May 1 15:53:43 2020 From: webhook-mailer at python.org (Antoine Pitrou) Date: Fri, 01 May 2020 19:53:43 -0000 Subject: [Python-checkins] [3.7] bpo-39435: Fix docs for pickle.loads (GH-18160). (GH-19844) Message-ID: https://github.com/python/cpython/commit/3859b1ac1d7014f8ff673962d94a01a408546e24 commit: 3859b1ac1d7014f8ff673962d94a01a408546e24 branch: 3.7 author: Antoine Pitrou committer: GitHub date: 2020-05-01T12:53:35-07:00 summary: [3.7] bpo-39435: Fix docs for pickle.loads (GH-18160). (GH-19844) (cherry picked from commit 289842ae820f99908d3a345f1f3b6d4e5b4b97fc) Co-authored-by: Shantanu Automerge-Triggered-By: @pitrou files: A Misc/NEWS.d/next/Documentation/2020-01-24-05-42-57.bpo-39435.EFcdFU.rst M Doc/library/pickle.rst M Misc/ACKS diff --git a/Doc/library/pickle.rst b/Doc/library/pickle.rst index d638944756e7a..25044899f242a 100644 --- a/Doc/library/pickle.rst +++ b/Doc/library/pickle.rst @@ -242,10 +242,10 @@ process more convenient: instances of :class:`~datetime.datetime`, :class:`~datetime.date` and :class:`~datetime.time` pickled by Python 2. -.. function:: loads(bytes_object, \*, fix_imports=True, encoding="ASCII", errors="strict") +.. 
function:: loads(data, \*, fix_imports=True, encoding="ASCII", errors="strict") Return the reconstituted object hierarchy of the pickled representation - *bytes_object* of an object. + *data* of an object. *data* must be a :term:`bytes-like object`. The protocol version of the pickle is detected automatically, so no protocol argument is needed. Bytes past the pickled representation diff --git a/Misc/ACKS b/Misc/ACKS index 27ef39754afd9..ce269edbd51bf 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -746,6 +746,7 @@ Manuel Jacob David Jacobs Kevin Jacobs Kjetil Jacobsen +Shantanu Jain Bertrand Janin Geert Jansen Jack Jansen diff --git a/Misc/NEWS.d/next/Documentation/2020-01-24-05-42-57.bpo-39435.EFcdFU.rst b/Misc/NEWS.d/next/Documentation/2020-01-24-05-42-57.bpo-39435.EFcdFU.rst new file mode 100644 index 0000000000000..40294c10df00a --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2020-01-24-05-42-57.bpo-39435.EFcdFU.rst @@ -0,0 +1 @@ +Fix an incorrect signature for :func:`pickle.loads` in the docs \ No newline at end of file From webhook-mailer at python.org Fri May 1 15:54:48 2020 From: webhook-mailer at python.org (Antoine Pitrou) Date: Fri, 01 May 2020 19:54:48 -0000 Subject: [Python-checkins] [3.8] bpo-39435: Fix docs for pickle.loads (GH-18160) (GH-19843) Message-ID: https://github.com/python/cpython/commit/e05828055e5165cc7268ea3bea33adc502e054a1 commit: e05828055e5165cc7268ea3bea33adc502e054a1 branch: 3.8 author: Antoine Pitrou committer: GitHub date: 2020-05-01T12:54:44-07:00 summary: [3.8] bpo-39435: Fix docs for pickle.loads (GH-18160) (GH-19843) (cherry picked from commit 289842a) Co-authored-by: Shantanu Automerge-Triggered-By: @pitrou files: A Misc/NEWS.d/next/Documentation/2020-01-24-05-42-57.bpo-39435.EFcdFU.rst M Doc/library/pickle.rst M Misc/ACKS diff --git a/Doc/library/pickle.rst b/Doc/library/pickle.rst index a7b92bb9538d9..d92e947a76403 100644 --- a/Doc/library/pickle.rst +++ b/Doc/library/pickle.rst @@ -252,10 +252,10 @@ process more convenient: 
.. versionchanged:: 3.8 The *buffers* argument was added. -.. function:: loads(bytes_object, \*, fix_imports=True, encoding="ASCII", errors="strict", buffers=None) +.. function:: loads(data, \*, fix_imports=True, encoding="ASCII", errors="strict", buffers=None) Return the reconstituted object hierarchy of the pickled representation - *bytes_object* of an object. + *data* of an object. *data* must be a :term:`bytes-like object`. The protocol version of the pickle is detected automatically, so no protocol argument is needed. Bytes past the pickled representation diff --git a/Misc/ACKS b/Misc/ACKS index 9d8c0f9a48626..34a6fc439e89c 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -769,6 +769,7 @@ Manuel Jacob David Jacobs Kevin Jacobs Kjetil Jacobsen +Shantanu Jain Bertrand Janin Geert Jansen Jack Jansen diff --git a/Misc/NEWS.d/next/Documentation/2020-01-24-05-42-57.bpo-39435.EFcdFU.rst b/Misc/NEWS.d/next/Documentation/2020-01-24-05-42-57.bpo-39435.EFcdFU.rst new file mode 100644 index 0000000000000..40294c10df00a --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2020-01-24-05-42-57.bpo-39435.EFcdFU.rst @@ -0,0 +1 @@ +Fix an incorrect signature for :func:`pickle.loads` in the docs \ No newline at end of file From webhook-mailer at python.org Fri May 1 17:34:03 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Fri, 01 May 2020 21:34:03 -0000 Subject: [Python-checkins] Fix the Tools/peg_generator/scripts/benchmark.py script (GH-19848) Message-ID: https://github.com/python/cpython/commit/6bd99d5f002f1cfcc3a975e87684c5238490644a commit: 6bd99d5f002f1cfcc3a975e87684c5238490644a branch: master author: Pablo Galindo committer: GitHub date: 2020-05-01T22:33:54+01:00 summary: Fix the Tools/peg_generator/scripts/benchmark.py script (GH-19848) files: M Tools/peg_generator/pegen/build.py M Tools/peg_generator/scripts/benchmark.py diff --git a/Tools/peg_generator/pegen/build.py b/Tools/peg_generator/pegen/build.py index 94248ffd9431c..d33dd049d63c7 100644 --- 
a/Tools/peg_generator/pegen/build.py +++ b/Tools/peg_generator/pegen/build.py @@ -15,7 +15,7 @@ from pegen.python_generator import PythonParserGenerator from pegen.tokenizer import Tokenizer -MOD_DIR = pathlib.Path(__file__).parent +MOD_DIR = pathlib.Path(__file__).resolve().parent def get_extra_flags(compiler_flags: str, compiler_py_flags_nodist: str) -> List[str]: diff --git a/Tools/peg_generator/scripts/benchmark.py b/Tools/peg_generator/scripts/benchmark.py index bc751156e8972..6b4287cd8cecc 100644 --- a/Tools/peg_generator/scripts/benchmark.py +++ b/Tools/peg_generator/scripts/benchmark.py @@ -11,7 +11,7 @@ sys.path.insert(0, os.getcwd()) from peg_extension import parse -from pegen.build import build_parser_and_generator +from pegen.build import build_c_parser_and_generator from scripts.test_parse_directory import parse_directory argparser = argparse.ArgumentParser( @@ -93,8 +93,9 @@ def run_benchmark_stdlib(subcommand, parser): modes = {"compile": 2, "parse": 1, "check": 0} extension = None if parser == "pegen": - extension = build_parser_and_generator( + extension = build_c_parser_and_generator( "../../Grammar/python.gram", + "../../Grammar/Tokens", "peg_extension/parse.c", compile_extension=True, skip_actions=False, From webhook-mailer at python.org Fri May 1 18:14:16 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Fri, 01 May 2020 22:14:16 -0000 Subject: [Python-checkins] bpo-40334: use the TOKENS file when checking dangling rules (GH-19849) Message-ID: https://github.com/python/cpython/commit/7ba08ff7b41911f972d0750e068a2270e0dbd68f commit: 7ba08ff7b41911f972d0750e068a2270e0dbd68f branch: master author: Pablo Galindo committer: GitHub date: 2020-05-01T23:14:12+01:00 summary: bpo-40334: use the TOKENS file when checking dangling rules (GH-19849) files: M Tools/peg_generator/pegen/build.py M Tools/peg_generator/pegen/c_generator.py M Tools/peg_generator/pegen/parser_generator.py M Tools/peg_generator/pegen/python_generator.py M 
Tools/peg_generator/pegen/testutil.py diff --git a/Tools/peg_generator/pegen/build.py b/Tools/peg_generator/pegen/build.py index d33dd049d63c7..907feeaf122de 100644 --- a/Tools/peg_generator/pegen/build.py +++ b/Tools/peg_generator/pegen/build.py @@ -17,6 +17,8 @@ MOD_DIR = pathlib.Path(__file__).resolve().parent +TokenDefinitions = Tuple[Dict[int, str], Dict[str, int], Set[str]] + def get_extra_flags(compiler_flags: str, compiler_py_flags_nodist: str) -> List[str]: flags = sysconfig.get_config_var(compiler_flags) @@ -112,7 +114,8 @@ def build_parser( return grammar, parser, tokenizer -def generate_token_definitions(tokens: IO[str]) -> Tuple[Dict[str, int], Set[str]]: +def generate_token_definitions(tokens: IO[str]) -> TokenDefinitions: + all_tokens = {} exact_tokens = {} non_exact_tokens = set() numbers = itertools.count(0) @@ -129,13 +132,15 @@ def generate_token_definitions(tokens: IO[str]) -> Tuple[Dict[str, int], Set[str if len(pieces) == 1: (token,) = pieces non_exact_tokens.add(token) + all_tokens[index] = token elif len(pieces) == 2: - _, op = pieces + token, op = pieces exact_tokens[op.strip("'")] = index + all_tokens[index] = token else: raise ValueError(f"Unexpected line found in Tokens file: {line}") - return exact_tokens, non_exact_tokens + return all_tokens, exact_tokens, non_exact_tokens def build_c_generator( @@ -149,10 +154,10 @@ def build_c_generator( skip_actions: bool = False, ) -> ParserGenerator: with open(tokens_file, "r") as tok_file: - exact_tok, non_exact_tok = generate_token_definitions(tok_file) + all_tokens, exact_tok, non_exact_tok = generate_token_definitions(tok_file) with open(output_file, "w") as file: gen: ParserGenerator = CParserGenerator( - grammar, exact_tok, non_exact_tok, file, skip_actions=skip_actions + grammar, all_tokens, exact_tok, non_exact_tok, file, skip_actions=skip_actions ) gen.generate(grammar_file) diff --git a/Tools/peg_generator/pegen/c_generator.py b/Tools/peg_generator/pegen/c_generator.py index 
6c77f43991bbe..c9c67067d4677 100644 --- a/Tools/peg_generator/pegen/c_generator.py +++ b/Tools/peg_generator/pegen/c_generator.py @@ -265,13 +265,14 @@ class CParserGenerator(ParserGenerator, GrammarVisitor): def __init__( self, grammar: grammar.Grammar, + tokens: Dict[int, str], exact_tokens: Dict[str, int], non_exact_tokens: Set[str], file: Optional[IO[Text]], debug: bool = False, skip_actions: bool = False, ): - super().__init__(grammar, file) + super().__init__(grammar, tokens, file) self.callmakervisitor: CCallMakerVisitor = CCallMakerVisitor( self, exact_tokens, non_exact_tokens ) diff --git a/Tools/peg_generator/pegen/parser_generator.py b/Tools/peg_generator/pegen/parser_generator.py index b92df2267762d..03452510b9669 100644 --- a/Tools/peg_generator/pegen/parser_generator.py +++ b/Tools/peg_generator/pegen/parser_generator.py @@ -1,5 +1,4 @@ import contextlib -import token from abc import abstractmethod from typing import AbstractSet, Dict, IO, Iterator, List, Optional, Set, Text, Tuple @@ -19,11 +18,12 @@ class RuleCheckingVisitor(GrammarVisitor): - def __init__(self, rules: Dict[str, Rule]): + def __init__(self, rules: Dict[str, Rule], tokens: Dict[int, str]): self.rules = rules + self.tokens = tokens def visit_NameLeaf(self, node: NameLeaf) -> None: - if node.value not in self.rules and node.value not in token.tok_name.values(): + if node.value not in self.rules and node.value not in self.tokens.values(): # TODO: Add line/col info to (leaf) nodes raise GrammarError(f"Dangling reference to rule {node.value!r}") @@ -32,12 +32,13 @@ class ParserGenerator: callmakervisitor: GrammarVisitor - def __init__(self, grammar: Grammar, file: Optional[IO[Text]]): + def __init__(self, grammar: Grammar, tokens: Dict[int, str], file: Optional[IO[Text]]): self.grammar = grammar + self.tokens = tokens self.rules = grammar.rules if "trailer" not in grammar.metas and "start" not in self.rules: raise GrammarError("Grammar without a trailer must have a 'start' rule") - 
checker = RuleCheckingVisitor(self.rules) + checker = RuleCheckingVisitor(self.rules, self.tokens) for rule in self.rules.values(): checker.visit(rule) self.file = file diff --git a/Tools/peg_generator/pegen/python_generator.py b/Tools/peg_generator/pegen/python_generator.py index bde27890c15a6..64336552f24f6 100644 --- a/Tools/peg_generator/pegen/python_generator.py +++ b/Tools/peg_generator/pegen/python_generator.py @@ -1,3 +1,4 @@ +import token from typing import Any, Dict, Optional, IO, Text, Tuple from pegen.grammar import ( @@ -123,8 +124,13 @@ def visit_Cut(self, node: Cut) -> Tuple[str, str]: class PythonParserGenerator(ParserGenerator, GrammarVisitor): - def __init__(self, grammar: grammar.Grammar, file: Optional[IO[Text]]): - super().__init__(grammar, file) + def __init__( + self, + grammar: grammar.Grammar, + file: Optional[IO[Text]], + tokens: Dict[int, str] = token.tok_name, + ): + super().__init__(grammar, tokens, file) self.callmakervisitor = PythonCallMakerVisitor(self) def generate(self, filename: str) -> None: diff --git a/Tools/peg_generator/pegen/testutil.py b/Tools/peg_generator/pegen/testutil.py index 1f79d8f702fb1..264659e71768c 100644 --- a/Tools/peg_generator/pegen/testutil.py +++ b/Tools/peg_generator/pegen/testutil.py @@ -17,6 +17,7 @@ from pegen.python_generator import PythonParserGenerator from pegen.tokenizer import Tokenizer +ALL_TOKENS = token.tok_name EXACT_TOKENS = token.EXACT_TOKEN_TYPES # type: ignore NON_EXACT_TOKENS = { name for index, name in token.tok_name.items() if index not in EXACT_TOKENS.values() @@ -76,7 +77,7 @@ def import_file(full_name: str, path: str) -> Any: def generate_c_parser_source(grammar: Grammar) -> str: out = io.StringIO() - genr = CParserGenerator(grammar, EXACT_TOKENS, NON_EXACT_TOKENS, out) + genr = CParserGenerator(grammar, ALL_TOKENS, EXACT_TOKENS, NON_EXACT_TOKENS, out) genr.generate("") return out.getvalue() @@ -96,7 +97,9 @@ def generate_parser_c_extension( assert not os.listdir(path) source = path 
/ "parse.c" with open(source, "w") as file: - genr = CParserGenerator(grammar, EXACT_TOKENS, NON_EXACT_TOKENS, file, debug=debug) + genr = CParserGenerator( + grammar, ALL_TOKENS, EXACT_TOKENS, NON_EXACT_TOKENS, file, debug=debug + ) genr.generate("parse.c") compile_c_extension(str(source), build_dir=str(path)) From webhook-mailer at python.org Fri May 1 19:06:28 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 01 May 2020 23:06:28 -0000 Subject: [Python-checkins] bpo-40412: Nullify inittab_copy during finalization (GH-19746) Message-ID: https://github.com/python/cpython/commit/1205afb3e10194fe22fa76385abb7e522144eb29 commit: 1205afb3e10194fe22fa76385abb7e522144eb29 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-01T16:06:23-07:00 summary: bpo-40412: Nullify inittab_copy during finalization (GH-19746) Otherwise we leave a dangling pointer to free'd memory. If we then initialize a new interpreter in the same process and call PyImport_ExtendInittab, we will (likely) crash when calling PyMem_RawRealloc(inittab_copy, ...) since the pointer address is bogus. Automerge-Triggered-By: @brettcannon (cherry picked from commit 64224a4727321a8dd33e6f769edda401193ebef0) Co-authored-by: Gregory Szorc files: A Misc/NEWS.d/next/C API/2020-05-01-17-28-04.bpo-40412.dE0D8N.rst M Python/import.c diff --git a/Misc/NEWS.d/next/C API/2020-05-01-17-28-04.bpo-40412.dE0D8N.rst b/Misc/NEWS.d/next/C API/2020-05-01-17-28-04.bpo-40412.dE0D8N.rst new file mode 100644 index 0000000000000..92bfcddf115a6 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2020-05-01-17-28-04.bpo-40412.dE0D8N.rst @@ -0,0 +1 @@ +Nullify inittab_copy during finalization, preventing future interpreter initializations in an embedded situation from crashing. Patch by Gregory Szorc. 
diff --git a/Python/import.c b/Python/import.c index 495012d1c7da6..b4074d1dfc3fa 100644 --- a/Python/import.c +++ b/Python/import.c @@ -300,6 +300,7 @@ _PyImport_Fini2(void) /* Free memory allocated by PyImport_ExtendInittab() */ PyMem_RawFree(inittab_copy); + inittab_copy = NULL; PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &old_alloc); } From webhook-mailer at python.org Fri May 1 19:28:14 2020 From: webhook-mailer at python.org (Robert Rouhani) Date: Fri, 01 May 2020 23:28:14 -0000 Subject: [Python-checkins] bpo-40417: Fix deprecation warning in PyImport_ReloadModule (GH-19750) Message-ID: https://github.com/python/cpython/commit/f40bd466bf14029e2687e36e965875adf9d4be1a commit: f40bd466bf14029e2687e36e965875adf9d4be1a branch: master author: Robert Rouhani committer: GitHub date: 2020-05-01T16:28:06-07:00 summary: bpo-40417: Fix deprecation warning in PyImport_ReloadModule (GH-19750) I can add another commit with the new test case I wrote to verify that the warning was being printed before my change, stopped printing after my change, and that the function does not return null after my change. Automerge-Triggered-By: @brettcannon files: A Misc/NEWS.d/next/Core and Builtins/2020-05-01-19-04-52.bpo-40417.Sti2lJ.rst M Python/import.c diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-01-19-04-52.bpo-40417.Sti2lJ.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-01-19-04-52.bpo-40417.Sti2lJ.rst new file mode 100644 index 0000000000000..932e853a8933d --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-01-19-04-52.bpo-40417.Sti2lJ.rst @@ -0,0 +1 @@ +Fix imp module deprecation warning when PyImport_ReloadModule is called. Patch by Robert Rouhani. 
diff --git a/Python/import.c b/Python/import.c index 400b02abbdba0..0e2e7c370868f 100644 --- a/Python/import.c +++ b/Python/import.c @@ -1978,23 +1978,23 @@ PyImport_ImportModuleLevel(const char *name, PyObject *globals, PyObject *locals PyObject * PyImport_ReloadModule(PyObject *m) { - _Py_IDENTIFIER(imp); + _Py_IDENTIFIER(importlib); _Py_IDENTIFIER(reload); PyObject *reloaded_module = NULL; - PyObject *imp = _PyImport_GetModuleId(&PyId_imp); - if (imp == NULL) { + PyObject *importlib = _PyImport_GetModuleId(&PyId_importlib); + if (importlib == NULL) { if (PyErr_Occurred()) { return NULL; } - imp = PyImport_ImportModule("imp"); - if (imp == NULL) { + importlib = PyImport_ImportModule("importlib"); + if (importlib == NULL) { return NULL; } } - reloaded_module = _PyObject_CallMethodIdOneArg(imp, &PyId_reload, m); - Py_DECREF(imp); + reloaded_module = _PyObject_CallMethodIdOneArg(importlib, &PyId_reload, m); + Py_DECREF(importlib); return reloaded_module; } From webhook-mailer at python.org Fri May 1 21:14:26 2020 From: webhook-mailer at python.org (Chris Jerdonek) Date: Sat, 02 May 2020 01:14:26 -0000 Subject: [Python-checkins] bpo-29587: Update gen.throw() to chain exceptions (#19823) Message-ID: https://github.com/python/cpython/commit/02047265eb83a43ba18cc7fee81756f1a1a1f968 commit: 02047265eb83a43ba18cc7fee81756f1a1a1f968 branch: master author: Chris Jerdonek committer: GitHub date: 2020-05-01T18:14:19-07:00 summary: bpo-29587: Update gen.throw() to chain exceptions (#19823) Before this commit, if an exception was active inside a generator when calling gen.throw(), that exception was lost (i.e. there was no implicit exception chaining). This commit fixes that by setting exc.__context__ when calling gen.throw(exc). 
files: A Misc/NEWS.d/next/Core and Builtins/2020-04-30-00-50-25.bpo-29587.oEwSq.rst M Lib/test/test_generators.py M Objects/genobject.c diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py index 3e42bc6b69a81..4d96f44b15062 100644 --- a/Lib/test/test_generators.py +++ b/Lib/test/test_generators.py @@ -316,6 +316,23 @@ def g(): self.assertEqual(cm.exception.value.value, 2) +class GeneratorThrowTest(unittest.TestCase): + + def test_exception_context_set(self): + def f(): + try: + raise KeyError('a') + except Exception: + yield + + gen = f() + gen.send(None) + with self.assertRaises(ValueError) as cm: + gen.throw(ValueError) + context = cm.exception.__context__ + self.assertEqual((type(context), context.args), (KeyError, ('a',))) + + class YieldFromTests(unittest.TestCase): def test_generator_gi_yieldfrom(self): def a(): diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-04-30-00-50-25.bpo-29587.oEwSq.rst b/Misc/NEWS.d/next/Core and Builtins/2020-04-30-00-50-25.bpo-29587.oEwSq.rst new file mode 100644 index 0000000000000..f44aa360cc2ef --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-04-30-00-50-25.bpo-29587.oEwSq.rst @@ -0,0 +1 @@ +Enable implicit exception chaining when calling :meth:`generator.throw`. diff --git a/Objects/genobject.c b/Objects/genobject.c index 6e36690b65148..41a63ae2e666a 100644 --- a/Objects/genobject.c +++ b/Objects/genobject.c @@ -512,6 +512,15 @@ _gen_throw(PyGenObject *gen, int close_on_genexit, } PyErr_Restore(typ, val, tb); + /* XXX Should we also handle the case where exc_type is true and + exc_value is false? 
*/ + if (gen->gi_exc_state.exc_type && gen->gi_exc_state.exc_value) { + Py_INCREF(gen->gi_exc_state.exc_type); + Py_INCREF(gen->gi_exc_state.exc_value); + Py_XINCREF(gen->gi_exc_state.exc_traceback); + _PyErr_ChainExceptions(gen->gi_exc_state.exc_type, + gen->gi_exc_state.exc_value, gen->gi_exc_state.exc_traceback); + } return gen_send_ex(gen, Py_None, 1, 0); failed_throw: From webhook-mailer at python.org Sat May 2 00:23:16 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Sat, 02 May 2020 04:23:16 -0000 Subject: [Python-checkins] Fix some scripts in the peg generator folder (GH-19853) Message-ID: https://github.com/python/cpython/commit/9dbaa8d9f054e53fac0c3d308d0bce3fc8850525 commit: 9dbaa8d9f054e53fac0c3d308d0bce3fc8850525 branch: master author: Pablo Galindo committer: GitHub date: 2020-05-02T05:23:06+01:00 summary: Fix some scripts in the peg generator folder (GH-19853) files: A Tools/peg_generator/data/top-pypi-packages-365-days.json A Tools/peg_generator/peg_extension/__init__.py A Tools/peg_generator/pegen/ast_dump.py D Lib/test/test_peg_generator/ast_dump.py M Lib/test/test_peg_generator/test_c_parser.py M Tools/peg_generator/data/cprog.py M Tools/peg_generator/pegen/build.py M Tools/peg_generator/scripts/benchmark.py M Tools/peg_generator/scripts/show_parse.py M Tools/peg_generator/scripts/test_parse_directory.py M Tools/peg_generator/scripts/test_pypi_packages.py diff --git a/Lib/test/test_peg_generator/test_c_parser.py b/Lib/test/test_peg_generator/test_c_parser.py index 8eb66d5279581..f66b92def9f6c 100644 --- a/Lib/test/test_peg_generator/test_c_parser.py +++ b/Lib/test/test_peg_generator/test_c_parser.py @@ -15,6 +15,7 @@ generate_parser_c_extension, generate_c_parser_source, ) + from pegen.ast_dump import ast_dump TEST_TEMPLATE = """ @@ -24,7 +25,10 @@ import traceback import sys import unittest -from test.test_peg_generator.ast_dump import ast_dump + +from test import test_tools +with test_tools.imports_under_tool("peg_generator"): + 
from pegen.ast_dump import ast_dump sys.path.insert(0, tmp_dir) import parse diff --git a/Tools/peg_generator/data/cprog.py b/Tools/peg_generator/data/cprog.py index 07b96f0753a98..79a42983dbd23 100644 --- a/Tools/peg_generator/data/cprog.py +++ b/Tools/peg_generator/data/cprog.py @@ -7,4 +7,5 @@ pass elif 1: pass - else: print("else-clause") + else: + print("else-clause") diff --git a/Tools/peg_generator/data/top-pypi-packages-365-days.json b/Tools/peg_generator/data/top-pypi-packages-365-days.json new file mode 100644 index 0000000000000..63ff08436a664 --- /dev/null +++ b/Tools/peg_generator/data/top-pypi-packages-365-days.json @@ -0,0 +1,16011 @@ +{ + "last_update": "2020-01-17 15:34:44", + "query": { + "bytes_billed": 646188105728, + "bytes_processed": 646187256701, + "cached": false, + "estimated_cost": "2.94" + }, + "rows": [ + { + "download_count": 910195765, + "project": "urllib3" + }, + { + "download_count": 749120890, + "project": "six" + }, + { + "download_count": 670113460, + "project": "botocore" + }, + { + "download_count": 629757389, + "project": "python-dateutil" + }, + { + "download_count": 629606070, + "project": "pip" + }, + { + "download_count": 626954494, + "project": "requests" + }, + { + "download_count": 595019137, + "project": "s3transfer" + }, + { + "download_count": 570148733, + "project": "certifi" + }, + { + "download_count": 542241710, + "project": "idna" + }, + { + "download_count": 534393540, + "project": "pyyaml" + }, + { + "download_count": 531342983, + "project": "pyasn1" + }, + { + "download_count": 518080177, + "project": "docutils" + }, + { + "download_count": 516892347, + "project": "chardet" + }, + { + "download_count": 502956749, + "project": "rsa" + }, + { + "download_count": 480905080, + "project": "jmespath" + }, + { + "download_count": 410856025, + "project": "setuptools" + }, + { + "download_count": 410196551, + "project": "pytz" + }, + { + "download_count": 397671253, + "project": "awscli" + }, + { + "download_count": 
392932234, + "project": "futures" + }, + { + "download_count": 375594752, + "project": "colorama" + }, + { + "download_count": 346035749, + "project": "simplejson" + }, + { + "download_count": 337185380, + "project": "boto3" + }, + { + "download_count": 305750769, + "project": "numpy" + }, + { + "download_count": 304101394, + "project": "wheel" + }, + { + "download_count": 264199809, + "project": "protobuf" + }, + { + "download_count": 244941990, + "project": "markupsafe" + }, + { + "download_count": 242351858, + "project": "cffi" + }, + { + "download_count": 214070466, + "project": "jinja2" + }, + { + "download_count": 212238740, + "project": "pyasn1-modules" + }, + { + "download_count": 210982876, + "project": "cryptography" + }, + { + "download_count": 190156825, + "project": "attrs" + }, + { + "download_count": 182755695, + "project": "cachetools" + }, + { + "download_count": 178075863, + "project": "google-api-core" + }, + { + "download_count": 177966855, + "project": "enum34" + }, + { + "download_count": 173568874, + "project": "click" + }, + { + "download_count": 168990924, + "project": "future" + }, + { + "download_count": 168313449, + "project": "google-auth" + }, + { + "download_count": 165064404, + "project": "pandas" + }, + { + "download_count": 161184509, + "project": "grpcio" + }, + { + "download_count": 153694077, + "project": "google-cloud-core" + }, + { + "download_count": 152780068, + "project": "pycparser" + }, + { + "download_count": 150391523, + "project": "googleapis-common-protos" + }, + { + "download_count": 145133278, + "project": "pyparsing" + }, + { + "download_count": 143193200, + "project": "werkzeug" + }, + { + "download_count": 136092386, + "project": "pytest" + }, + { + "download_count": 135106914, + "project": "decorator" + }, + { + "download_count": 128924918, + "project": "asn1crypto" + }, + { + "download_count": 126657878, + "project": "more-itertools" + }, + { + "download_count": 126309809, + "project": "awscli-cwlogs" + }, + { 
+ "download_count": 120300118, + "project": "pluggy" + }, + { + "download_count": 117455899, + "project": "flask" + }, + { + "download_count": 116968652, + "project": "scipy" + }, + { + "download_count": 113639938, + "project": "itsdangerous" + }, + { + "download_count": 111213522, + "project": "oauthlib" + }, + { + "download_count": 106969182, + "project": "py" + }, + { + "download_count": 106245186, + "project": "coverage" + }, + { + "download_count": 104256236, + "project": "virtualenv" + }, + { + "download_count": 102765613, + "project": "requests-oauthlib" + }, + { + "download_count": 102590841, + "project": "psutil" + }, + { + "download_count": 102589154, + "project": "ipaddress" + }, + { + "download_count": 102291693, + "project": "jsonschema" + }, + { + "download_count": 100560003, + "project": "scikit-learn" + }, + { + "download_count": 99249602, + "project": "importlib-metadata" + }, + { + "download_count": 95618798, + "project": "pygments" + }, + { + "download_count": 94913658, + "project": "wcwidth" + }, + { + "download_count": 93958133, + "project": "zipp" + }, + { + "download_count": 93185870, + "project": "pyopenssl" + }, + { + "download_count": 92353815, + "project": "pyjwt" + }, + { + "download_count": 92018680, + "project": "mock" + }, + { + "download_count": 90635179, + "project": "wrapt" + }, + { + "download_count": 90150749, + "project": "google-cloud-storage" + }, + { + "download_count": 86097386, + "project": "pillow" + }, + { + "download_count": 85698334, + "project": "websocket-client" + }, + { + "download_count": 84842257, + "project": "packaging" + }, + { + "download_count": 84475934, + "project": "pbr" + }, + { + "download_count": 82019683, + "project": "ipython" + }, + { + "download_count": 81402313, + "project": "prompt-toolkit" + }, + { + "download_count": 80731622, + "project": "matplotlib" + }, + { + "download_count": 80443033, + "project": "httplib2" + }, + { + "download_count": 78391981, + "project": "boto" + }, + { + 
"download_count": 77428445, + "project": "lxml" + }, + { + "download_count": 76599773, + "project": "docker" + }, + { + "download_count": 75883487, + "project": "atomicwrites" + }, + { + "download_count": 73114976, + "project": "google-resumable-media" + }, + { + "download_count": 72286328, + "project": "sqlalchemy" + }, + { + "download_count": 71355694, + "project": "argparse" + }, + { + "download_count": 70247997, + "project": "kiwisolver" + }, + { + "download_count": 70157529, + "project": "mccabe" + }, + { + "download_count": 69616809, + "project": "configparser" + }, + { + "download_count": 68080016, + "project": "multidict" + }, + { + "download_count": 65738785, + "project": "tqdm" + }, + { + "download_count": 65716434, + "project": "tornado" + }, + { + "download_count": 65152549, + "project": "funcsigs" + }, + { + "download_count": 64373372, + "project": "beautifulsoup4" + }, + { + "download_count": 64241326, + "project": "paramiko" + }, + { + "download_count": 63570436, + "project": "psycopg2" + }, + { + "download_count": 63544025, + "project": "pyrsistent" + }, + { + "download_count": 63424037, + "project": "typing" + }, + { + "download_count": 62605787, + "project": "markdown" + }, + { + "download_count": 62535342, + "project": "google-api-python-client" + }, + { + "download_count": 61655343, + "project": "redis" + }, + { + "download_count": 61634970, + "project": "bcrypt" + }, + { + "download_count": 60696872, + "project": "pexpect" + }, + { + "download_count": 60144339, + "project": "pycodestyle" + }, + { + "download_count": 60125614, + "project": "absl-py" + }, + { + "download_count": 59496247, + "project": "ptyprocess" + }, + { + "download_count": 59137610, + "project": "aiohttp" + }, + { + "download_count": 59052497, + "project": "entrypoints" + }, + { + "download_count": 58282657, + "project": "oauth2client" + }, + { + "download_count": 57910701, + "project": "docopt" + }, + { + "download_count": 57238190, + "project": "pynacl" + }, + { + 
"download_count": 55087716, + "project": "traitlets" + }, + { + "download_count": 55005408, + "project": "tabulate" + }, + { + "download_count": 54655331, + "project": "backports-functools-lru-cache" + }, + { + "download_count": 54439203, + "project": "lazy-object-proxy" + }, + { + "download_count": 54278961, + "project": "dill" + }, + { + "download_count": 53875643, + "project": "ipython-genutils" + }, + { + "download_count": 53414364, + "project": "pathlib2" + }, + { + "download_count": 53208142, + "project": "isodate" + }, + { + "download_count": 52918821, + "project": "azure-common" + }, + { + "download_count": 52876560, + "project": "gunicorn" + }, + { + "download_count": 52367394, + "project": "uritemplate" + }, + { + "download_count": 52356165, + "project": "cycler" + }, + { + "download_count": 52009177, + "project": "defusedxml" + }, + { + "download_count": 51204829, + "project": "psycopg2-binary" + }, + { + "download_count": 51194283, + "project": "h5py" + }, + { + "download_count": 51011471, + "project": "termcolor" + }, + { + "download_count": 50365341, + "project": "pickleshare" + }, + { + "download_count": 50282815, + "project": "soupsieve" + }, + { + "download_count": 50184503, + "project": "pyflakes" + }, + { + "download_count": 49235593, + "project": "requests-toolbelt" + }, + { + "download_count": 48265870, + "project": "google-cloud-bigquery" + }, + { + "download_count": 47092132, + "project": "tensorboard" + }, + { + "download_count": 46785233, + "project": "typed-ast" + }, + { + "download_count": 46639206, + "project": "networkx" + }, + { + "download_count": 45991420, + "project": "webencodings" + }, + { + "download_count": 45685686, + "project": "async-timeout" + }, + { + "download_count": 45449338, + "project": "tensorflow" + }, + { + "download_count": 45435235, + "project": "gitpython" + }, + { + "download_count": 45275021, + "project": "pymongo" + }, + { + "download_count": 45205520, + "project": "azure-storage-blob" + }, + { + 
"download_count": 45085736, + "project": "flake8" + }, + { + "download_count": 44565799, + "project": "isort" + }, + { + "download_count": 44491717, + "project": "contextlib2" + }, + { + "download_count": 44308938, + "project": "scandir" + }, + { + "download_count": 44265261, + "project": "functools32" + }, + { + "download_count": 44039749, + "project": "gevent" + }, + { + "download_count": 42987880, + "project": "pytest-cov" + }, + { + "download_count": 42298933, + "project": "docker-pycreds" + }, + { + "download_count": 42280978, + "project": "joblib" + }, + { + "download_count": 42125807, + "project": "yarl" + }, + { + "download_count": 42105718, + "project": "grpc-google-iam-v1" + }, + { + "download_count": 42070985, + "project": "greenlet" + }, + { + "download_count": 41679952, + "project": "zope-interface" + }, + { + "download_count": 41396597, + "project": "pyzmq" + }, + { + "download_count": 41281740, + "project": "pymysql" + }, + { + "download_count": 41194733, + "project": "django" + }, + { + "download_count": 41174124, + "project": "datadog" + }, + { + "download_count": 41132868, + "project": "bleach" + }, + { + "download_count": 40599053, + "project": "astroid" + }, + { + "download_count": 40529351, + "project": "gitdb2" + }, + { + "download_count": 40342805, + "project": "pylint" + }, + { + "download_count": 40116789, + "project": "babel" + }, + { + "download_count": 39847400, + "project": "azure-storage-common" + }, + { + "download_count": 39689270, + "project": "keras-applications" + }, + { + "download_count": 39395842, + "project": "keras-preprocessing" + }, + { + "download_count": 39184540, + "project": "smmap2" + }, + { + "download_count": 38876199, + "project": "opencv-python" + }, + { + "download_count": 38852272, + "project": "subprocess32" + }, + { + "download_count": 38836392, + "project": "msrest" + }, + { + "download_count": 38732044, + "project": "google-auth-httplib2" + }, + { + "download_count": 38166504, + "project": "parso" + }, + { + 
"download_count": 37940669, + "project": "jedi" + }, + { + "download_count": 37805943, + "project": "pycryptodome" + }, + { + "download_count": 37739739, + "project": "astor" + }, + { + "download_count": 37110085, + "project": "gast" + }, + { + "download_count": 36881409, + "project": "retrying" + }, + { + "download_count": 35451582, + "project": "elasticsearch" + }, + { + "download_count": 35263938, + "project": "jsonpickle" + }, + { + "download_count": 34975483, + "project": "sqlparse" + }, + { + "download_count": 34879648, + "project": "pyarrow" + }, + { + "download_count": 34858569, + "project": "ordereddict" + }, + { + "download_count": 33824794, + "project": "scikit-image" + }, + { + "download_count": 33775490, + "project": "pycrypto" + }, + { + "download_count": 32742937, + "project": "appdirs" + }, + { + "download_count": 32689782, + "project": "toml" + }, + { + "download_count": 32684718, + "project": "adal" + }, + { + "download_count": 32591485, + "project": "azure-nspkg" + }, + { + "download_count": 32103427, + "project": "xlrd" + }, + { + "download_count": 32000159, + "project": "jupyter-core" + }, + { + "download_count": 31774601, + "project": "xmltodict" + }, + { + "download_count": 31736336, + "project": "toolz" + }, + { + "download_count": 31576642, + "project": "cached-property" + }, + { + "download_count": 31550164, + "project": "prometheus-client" + }, + { + "download_count": 31302562, + "project": "tensorflow-estimator" + }, + { + "download_count": 31010564, + "project": "py4j" + }, + { + "download_count": 30527374, + "project": "websockets" + }, + { + "download_count": 30383292, + "project": "dnspython" + }, + { + "download_count": 30245623, + "project": "nbformat" + }, + { + "download_count": 30162734, + "project": "monotonic" + }, + { + "download_count": 29978338, + "project": "nose" + }, + { + "download_count": 29531870, + "project": "typing-extensions" + }, + { + "download_count": 29443454, + "project": "sklearn" + }, + { + 
"download_count": 29064516, + "project": "cloudpickle" + }, + { + "download_count": 28794637, + "project": "pywavelets" + }, + { + "download_count": 28710649, + "project": "pycryptodomex" + }, + { + "download_count": 28533182, + "project": "ansible" + }, + { + "download_count": 28501824, + "project": "singledispatch" + }, + { + "download_count": 28281846, + "project": "ply" + }, + { + "download_count": 27973857, + "project": "cython" + }, + { + "download_count": 27913607, + "project": "mako" + }, + { + "download_count": 27864029, + "project": "selenium" + }, + { + "download_count": 27848508, + "project": "html5lib" + }, + { + "download_count": 27745677, + "project": "simplegeneric" + }, + { + "download_count": 27671952, + "project": "apache-beam" + }, + { + "download_count": 27579084, + "project": "backcall" + }, + { + "download_count": 26844011, + "project": "msgpack" + }, + { + "download_count": 26331607, + "project": "dask" + }, + { + "download_count": 26266166, + "project": "regex" + }, + { + "download_count": 26239282, + "project": "ipykernel" + }, + { + "download_count": 25952891, + "project": "ujson" + }, + { + "download_count": 25898723, + "project": "mistune" + }, + { + "download_count": 25796973, + "project": "backports-ssl-match-hostname" + }, + { + "download_count": 25756543, + "project": "amqp" + }, + { + "download_count": 25750485, + "project": "jupyter-client" + }, + { + "download_count": 25701706, + "project": "docker-compose" + }, + { + "download_count": 25315661, + "project": "kombu" + }, + { + "download_count": 25281035, + "project": "ruamel-yaml" + }, + { + "download_count": 25271754, + "project": "nltk" + }, + { + "download_count": 25075126, + "project": "alembic" + }, + { + "download_count": 24664889, + "project": "google-auth-oauthlib" + }, + { + "download_count": 24499399, + "project": "raven" + }, + { + "download_count": 24483899, + "project": "python-editor" + }, + { + "download_count": 24388318, + "project": "sortedcontainers" + }, + { + 
"download_count": 24375921, + "project": "nbconvert" + }, + { + "download_count": 24045975, + "project": "thrift" + }, + { + "download_count": 23835990, + "project": "notebook" + }, + { + "download_count": 23817589, + "project": "hdfs" + }, + { + "download_count": 23689627, + "project": "slackclient" + }, + { + "download_count": 23619686, + "project": "testpath" + }, + { + "download_count": 23536824, + "project": "s3fs" + }, + { + "download_count": 23476069, + "project": "keras" + }, + { + "download_count": 23364791, + "project": "celery" + }, + { + "download_count": 23339282, + "project": "discord-py" + }, + { + "download_count": 23232254, + "project": "billiard" + }, + { + "download_count": 23210897, + "project": "filelock" + }, + { + "download_count": 23187414, + "project": "snowballstemmer" + }, + { + "download_count": 23088875, + "project": "unidecode" + }, + { + "download_count": 23011985, + "project": "netaddr" + }, + { + "download_count": 22993463, + "project": "pandocfilters" + }, + { + "download_count": 22747435, + "project": "send2trash" + }, + { + "download_count": 22715519, + "project": "terminado" + }, + { + "download_count": 22431738, + "project": "backports-shutil-get-terminal-size" + }, + { + "download_count": 22409669, + "project": "backports-weakref" + }, + { + "download_count": 22231171, + "project": "msrestazure" + }, + { + "download_count": 21906531, + "project": "sentry-sdk" + }, + { + "download_count": 21817254, + "project": "ipywidgets" + }, + { + "download_count": 21711592, + "project": "tzlocal" + }, + { + "download_count": 21626474, + "project": "widgetsnbextension" + }, + { + "download_count": 21533795, + "project": "ijson" + }, + { + "download_count": 21335834, + "project": "mysqlclient" + }, + { + "download_count": 20939369, + "project": "tox" + }, + { + "download_count": 20733637, + "project": "lockfile" + }, + { + "download_count": 20642115, + "project": "xgboost" + }, + { + "download_count": 20630795, + "project": "arrow" + }, + { 
+ "download_count": 20559416, + "project": "vine" + }, + { + "download_count": 20399386, + "project": "google-cloud-pubsub" + }, + { + "download_count": 20372136, + "project": "sphinx" + }, + { + "download_count": 20261684, + "project": "djangorestframework" + }, + { + "download_count": 20222772, + "project": "openpyxl" + }, + { + "download_count": 20101811, + "project": "ecdsa" + }, + { + "download_count": 20081473, + "project": "xlsxwriter" + }, + { + "download_count": 20021156, + "project": "snowflake-connector-python" + }, + { + "download_count": 19972964, + "project": "pyhamcrest" + }, + { + "download_count": 19806017, + "project": "google-cloud-firestore" + }, + { + "download_count": 19717486, + "project": "google-cloud-datastore" + }, + { + "download_count": 19580510, + "project": "google-pasta" + }, + { + "download_count": 19191080, + "project": "qtconsole" + }, + { + "download_count": 19179159, + "project": "bs4" + }, + { + "download_count": 19098496, + "project": "text-unidecode" + }, + { + "download_count": 19089305, + "project": "prettytable" + }, + { + "download_count": 19018504, + "project": "jdcal" + }, + { + "download_count": 19002384, + "project": "google-cloud-logging" + }, + { + "download_count": 18962785, + "project": "backports-abc" + }, + { + "download_count": 18918332, + "project": "jupyter-console" + }, + { + "download_count": 18706905, + "project": "smart-open" + }, + { + "download_count": 18670352, + "project": "alabaster" + }, + { + "download_count": 18664013, + "project": "pyspark" + }, + { + "download_count": 18533388, + "project": "jupyter" + }, + { + "download_count": 18480060, + "project": "statsmodels" + }, + { + "download_count": 18431746, + "project": "unicodecsv" + }, + { + "download_count": 18351262, + "project": "dockerpty" + }, + { + "download_count": 18303864, + "project": "shapely" + }, + { + "download_count": 18289269, + "project": "twisted" + }, + { + "download_count": 18288202, + "project": "hiredis" + }, + { + 
"download_count": 18166239, + "project": "virtualenv-clone" + }, + { + "download_count": 18139397, + "project": "imagesize" + }, + { + "download_count": 18056871, + "project": "idna-ssl" + }, + { + "download_count": 18052633, + "project": "fasteners" + }, + { + "download_count": 18027552, + "project": "marshmallow" + }, + { + "download_count": 18017517, + "project": "plotly" + }, + { + "download_count": 17675962, + "project": "pytest-forked" + }, + { + "download_count": 17577035, + "project": "texttable" + }, + { + "download_count": 17473671, + "project": "et-xmlfile" + }, + { + "download_count": 17113449, + "project": "kubernetes" + }, + { + "download_count": 17078526, + "project": "incremental" + }, + { + "download_count": 16916001, + "project": "iso8601" + }, + { + "download_count": 16883776, + "project": "applicationinsights" + }, + { + "download_count": 16840538, + "project": "google-cloud-bigtable" + }, + { + "download_count": 16823748, + "project": "pathlib" + }, + { + "download_count": 16759673, + "project": "constantly" + }, + { + "download_count": 16691118, + "project": "automat" + }, + { + "download_count": 16638971, + "project": "hyperlink" + }, + { + "download_count": 16463703, + "project": "azure-mgmt-resource" + }, + { + "download_count": 16410162, + "project": "croniter" + }, + { + "download_count": 16390810, + "project": "python-jose" + }, + { + "download_count": 16303498, + "project": "pipenv" + }, + { + "download_count": 15658966, + "project": "pathspec" + }, + { + "download_count": 15520321, + "project": "nvidia-ml-py3" + }, + { + "download_count": 15364508, + "project": "execnet" + }, + { + "download_count": 15314360, + "project": "aniso8601" + }, + { + "download_count": 15247809, + "project": "python-magic" + }, + { + "download_count": 15213240, + "project": "flask-cors" + }, + { + "download_count": 15203298, + "project": "inflection" + }, + { + "download_count": 15113541, + "project": "gym" + }, + { + "download_count": 14999608, + "project": 
"mypy" + }, + { + "download_count": 14927461, + "project": "azure-mgmt-storage" + }, + { + "download_count": 14835131, + "project": "flask-sqlalchemy" + }, + { + "download_count": 14822442, + "project": "service-identity" + }, + { + "download_count": 14807088, + "project": "mozrunner" + }, + { + "download_count": 14682178, + "project": "argcomplete" + }, + { + "download_count": 14637155, + "project": "faker" + }, + { + "download_count": 14609350, + "project": "uvloop" + }, + { + "download_count": 14582824, + "project": "apipkg" + }, + { + "download_count": 14479520, + "project": "stevedore" + }, + { + "download_count": 14469933, + "project": "azure-storage-nspkg" + }, + { + "download_count": 14356576, + "project": "ndg-httpsclient" + }, + { + "download_count": 14226382, + "project": "pyserial" + }, + { + "download_count": 14190037, + "project": "seaborn" + }, + { + "download_count": 14151070, + "project": "distro" + }, + { + "download_count": 14141290, + "project": "pytest-timeout" + }, + { + "download_count": 14122087, + "project": "bz2file" + }, + { + "download_count": 14098838, + "project": "patsy" + }, + { + "download_count": 14036101, + "project": "cssselect" + }, + { + "download_count": 13956987, + "project": "tenacity" + }, + { + "download_count": 13927328, + "project": "tensorflow-metadata" + }, + { + "download_count": 13870715, + "project": "graphviz" + }, + { + "download_count": 13850391, + "project": "pydot" + }, + { + "download_count": 13813387, + "project": "azure-mgmt-nspkg" + }, + { + "download_count": 13809809, + "project": "avro" + }, + { + "download_count": 13771055, + "project": "imageio" + }, + { + "download_count": 13764392, + "project": "fastavro" + }, + { + "download_count": 13686467, + "project": "gensim" + }, + { + "download_count": 13643493, + "project": "trueskill" + }, + { + "download_count": 13548711, + "project": "statsd" + }, + { + "download_count": 13505330, + "project": "pytest-xdist" + }, + { + "download_count": 13453212, + 
"project": "azure-mgmt-containerregistry" + }, + { + "download_count": 13380441, + "project": "mypy-extensions" + }, + { + "download_count": 13340370, + "project": "azure-mgmt-keyvault" + }, + { + "download_count": 13259227, + "project": "ua-parser" + }, + { + "download_count": 13241753, + "project": "configobj" + }, + { + "download_count": 13193523, + "project": "mozlog" + }, + { + "download_count": 13161090, + "project": "fuzzywuzzy" + }, + { + "download_count": 13153967, + "project": "google-gax" + }, + { + "download_count": 12999681, + "project": "responses" + }, + { + "download_count": 12946906, + "project": "aliyun-python-sdk-core" + }, + { + "download_count": 12863346, + "project": "azure-datalake-store" + }, + { + "download_count": 12839810, + "project": "pytest-mock" + }, + { + "download_count": 12835022, + "project": "aliyun-python-sdk-ecs" + }, + { + "download_count": 12816025, + "project": "elasticsearch-dsl" + }, + { + "download_count": 12792645, + "project": "azure-mgmt-authorization" + }, + { + "download_count": 12780433, + "project": "google-apitools" + }, + { + "download_count": 12772525, + "project": "python-daemon" + }, + { + "download_count": 12766382, + "project": "azure-graphrbac" + }, + { + "download_count": 12561149, + "project": "netifaces" + }, + { + "download_count": 12538305, + "project": "s3cmd" + }, + { + "download_count": 12534903, + "project": "python-json-logger" + }, + { + "download_count": 12484719, + "project": "aliyunsdkcore" + }, + { + "download_count": 12406280, + "project": "manhole" + }, + { + "download_count": 12261609, + "project": "hvac" + }, + { + "download_count": 12253367, + "project": "humanfriendly" + }, + { + "download_count": 12246930, + "project": "ipdb" + }, + { + "download_count": 12209179, + "project": "deepdiff" + }, + { + "download_count": 12207990, + "project": "freezegun" + }, + { + "download_count": 12098216, + "project": "maxminddb" + }, + { + "download_count": 12042231, + "project": "uwsgi" + }, + { + 
"download_count": 11947362, + "project": "pykube" + }, + { + "download_count": 11860617, + "project": "appnope" + }, + { + "download_count": 11805813, + "project": "databricks-cli" + }, + { + "download_count": 11788737, + "project": "python-levenshtein" + }, + { + "download_count": 11778504, + "project": "tensorflow-transform" + }, + { + "download_count": 11612558, + "project": "tldextract" + }, + { + "download_count": 11569388, + "project": "pyodbc" + }, + { + "download_count": 11561349, + "project": "autopep8" + }, + { + "download_count": 11432600, + "project": "pendulum" + }, + { + "download_count": 11383453, + "project": "newrelic" + }, + { + "download_count": 11361327, + "project": "python-dotenv" + }, + { + "download_count": 11334209, + "project": "pytzdata" + }, + { + "download_count": 11270038, + "project": "wtforms" + }, + { + "download_count": 11224152, + "project": "pytest-runner" + }, + { + "download_count": 11104163, + "project": "libtmux" + }, + { + "download_count": 11089587, + "project": "zope-deprecation" + }, + { + "download_count": 11017907, + "project": "jsonpointer" + }, + { + "download_count": 10994575, + "project": "webob" + }, + { + "download_count": 10990219, + "project": "retry" + }, + { + "download_count": 10987260, + "project": "blinker" + }, + { + "download_count": 10973921, + "project": "semantic-version" + }, + { + "download_count": 10843556, + "project": "requests-file" + }, + { + "download_count": 10781388, + "project": "graphql-core" + }, + { + "download_count": 10728518, + "project": "blessings" + }, + { + "download_count": 10716974, + "project": "backoff" + }, + { + "download_count": 10695298, + "project": "black" + }, + { + "download_count": 10686016, + "project": "geopy" + }, + { + "download_count": 10629161, + "project": "google-cloud" + }, + { + "download_count": 10551343, + "project": "bottle" + }, + { + "download_count": 10527245, + "project": "pep8" + }, + { + "download_count": 10511519, + "project": "geoip2" + }, + { + 
"download_count": 10451332, + "project": "grpcio-tools" + }, + { + "download_count": 10410102, + "project": "traceback2" + }, + { + "download_count": 10386312, + "project": "linecache2" + }, + { + "download_count": 10351287, + "project": "django-extensions" + }, + { + "download_count": 10318239, + "project": "sphinxcontrib-websupport" + }, + { + "download_count": 10239847, + "project": "unittest2" + }, + { + "download_count": 10187032, + "project": "fsspec" + }, + { + "download_count": 10146539, + "project": "django-cors-headers" + }, + { + "download_count": 10119472, + "project": "pkginfo" + }, + { + "download_count": 10077843, + "project": "django-filter" + }, + { + "download_count": 10057055, + "project": "secretstorage" + }, + { + "download_count": 10050204, + "project": "user-agents" + }, + { + "download_count": 10001744, + "project": "configargparse" + }, + { + "download_count": 9957349, + "project": "scp" + }, + { + "download_count": 9942530, + "project": "azure-devops" + }, + { + "download_count": 9938936, + "project": "azure-mgmt-compute" + }, + { + "download_count": 9934159, + "project": "azure-mgmt-network" + }, + { + "download_count": 9904711, + "project": "msgpack-python" + }, + { + "download_count": 9827614, + "project": "azure-mgmt-datalake-nspkg" + }, + { + "download_count": 9735081, + "project": "azure-mgmt-datalake-store" + }, + { + "download_count": 9706197, + "project": "google-cloud-monitoring" + }, + { + "download_count": 9674967, + "project": "mpi4py" + }, + { + "download_count": 9609045, + "project": "mozdevice" + }, + { + "download_count": 9561083, + "project": "azure-keyvault" + }, + { + "download_count": 9523786, + "project": "pysocks" + }, + { + "download_count": 9521848, + "project": "azure-cli" + }, + { + "download_count": 9493349, + "project": "jsondiff" + }, + { + "download_count": 9467938, + "project": "cherrypy" + }, + { + "download_count": 9467625, + "project": "pika" + }, + { + "download_count": 9410911, + "project": 
"parsedatetime" + }, + { + "download_count": 9399772, + "project": "azure-mgmt-batch" + }, + { + "download_count": 9376391, + "project": "lightgbm" + }, + { + "download_count": 9375734, + "project": "querystring-parser" + }, + { + "download_count": 9342152, + "project": "pyrfc3339" + }, + { + "download_count": 9319192, + "project": "argh" + }, + { + "download_count": 9315946, + "project": "pyproj" + }, + { + "download_count": 9307163, + "project": "mozprofile" + }, + { + "download_count": 9301729, + "project": "pycurl" + }, + { + "download_count": 9288555, + "project": "dictdiffer" + }, + { + "download_count": 9274785, + "project": "flask-wtf" + }, + { + "download_count": 9274704, + "project": "mysql-connector-python" + }, + { + "download_count": 9272854, + "project": "cheroot" + }, + { + "download_count": 9261620, + "project": "codecov" + }, + { + "download_count": 9224842, + "project": "mozinfo" + }, + { + "download_count": 9222371, + "project": "jsonpatch" + }, + { + "download_count": 9217176, + "project": "glob2" + }, + { + "download_count": 9059754, + "project": "azure-batch" + }, + { + "download_count": 9057979, + "project": "crcmod" + }, + { + "download_count": 9033939, + "project": "jaraco-functools" + }, + { + "download_count": 8995380, + "project": "tempora" + }, + { + "download_count": 8959399, + "project": "azure-mgmt-dns" + }, + { + "download_count": 8945640, + "project": "pyhive" + }, + { + "download_count": 8906609, + "project": "azure-mgmt-rdbms" + }, + { + "download_count": 8891960, + "project": "azure-mgmt-sql" + }, + { + "download_count": 8888437, + "project": "mozprocess" + }, + { + "download_count": 8874708, + "project": "portend" + }, + { + "download_count": 8853246, + "project": "geographiclib" + }, + { + "download_count": 8803957, + "project": "azure-mgmt-web" + }, + { + "download_count": 8753999, + "project": "deprecated" + }, + { + "download_count": 8739361, + "project": "munch" + }, + { + "download_count": 8687617, + "project": "jpype1" + 
}, + { + "download_count": 8659485, + "project": "pysftp" + }, + { + "download_count": 8648248, + "project": "watchdog" + }, + { + "download_count": 8644057, + "project": "ruamel-yaml-clib" + }, + { + "download_count": 8628293, + "project": "mlflow" + }, + { + "download_count": 8605163, + "project": "kafka-python" + }, + { + "download_count": 8593398, + "project": "google" + }, + { + "download_count": 8591157, + "project": "gapic-google-cloud-logging-v2" + }, + { + "download_count": 8565550, + "project": "mujoco-py" + }, + { + "download_count": 8557624, + "project": "zeep" + }, + { + "download_count": 8557527, + "project": "proto-google-cloud-logging-v2" + }, + { + "download_count": 8555221, + "project": "azure-storage" + }, + { + "download_count": 8548889, + "project": "pathtools" + }, + { + "download_count": 8547554, + "project": "django-storages" + }, + { + "download_count": 8493425, + "project": "spacy" + }, + { + "download_count": 8479997, + "project": "pytest-instafail" + }, + { + "download_count": 8476835, + "project": "thinc" + }, + { + "download_count": 8468171, + "project": "factory-boy" + }, + { + "download_count": 8466351, + "project": "preshed" + }, + { + "download_count": 8433752, + "project": "google-cloud-spanner" + }, + { + "download_count": 8433718, + "project": "simpleflock" + }, + { + "download_count": 8402292, + "project": "cymem" + }, + { + "download_count": 8374248, + "project": "azure-storage-queue" + }, + { + "download_count": 8367380, + "project": "azure-mgmt-monitor" + }, + { + "download_count": 8361234, + "project": "murmurhash" + }, + { + "download_count": 8360473, + "project": "jeepney" + }, + { + "download_count": 8358801, + "project": "azure-mgmt-containerservice" + }, + { + "download_count": 8334989, + "project": "zc-lockfile" + }, + { + "download_count": 8334854, + "project": "numpy-stl" + }, + { + "download_count": 8334779, + "project": "requests-mock" + }, + { + "download_count": 8331547, + "project": "tensorflow-serving-api" + 
}, + { + "download_count": 8316359, + "project": "passlib" + }, + { + "download_count": 8257864, + "project": "aws-xray-sdk" + }, + { + "download_count": 8253117, + "project": "waitress" + }, + { + "download_count": 8213115, + "project": "azure-mgmt-containerinstance" + }, + { + "download_count": 8194190, + "project": "oauth" + }, + { + "download_count": 8192420, + "project": "azure-mgmt-redis" + }, + { + "download_count": 8182626, + "project": "azure-mgmt-cognitiveservices" + }, + { + "download_count": 8169888, + "project": "fabric" + }, + { + "download_count": 8160603, + "project": "sphinx-rtd-theme" + }, + { + "download_count": 8151766, + "project": "azure-mgmt-trafficmanager" + }, + { + "download_count": 8146427, + "project": "pystache" + }, + { + "download_count": 8142774, + "project": "python-slugify" + }, + { + "download_count": 8104254, + "project": "azure-mgmt-devtestlabs" + }, + { + "download_count": 8101969, + "project": "sh" + }, + { + "download_count": 8100079, + "project": "azure-mgmt-cdn" + }, + { + "download_count": 8084499, + "project": "azure-mgmt-datalake-analytics" + }, + { + "download_count": 8068973, + "project": "pyaml" + }, + { + "download_count": 8068659, + "project": "azure-mgmt-iothub" + }, + { + "download_count": 8045085, + "project": "azure-mgmt-cosmosdb" + }, + { + "download_count": 8043637, + "project": "jira" + }, + { + "download_count": 8016426, + "project": "mozterm" + }, + { + "download_count": 8000597, + "project": "flask-login" + }, + { + "download_count": 7983143, + "project": "pycairo" + }, + { + "download_count": 7981647, + "project": "invoke" + }, + { + "download_count": 7969857, + "project": "pyxdg" + }, + { + "download_count": 7896477, + "project": "flask-restful" + }, + { + "download_count": 7892342, + "project": "pymssql" + }, + { + "download_count": 7872871, + "project": "plac" + }, + { + "download_count": 7871712, + "project": "colorlog" + }, + { + "download_count": 7841110, + "project": "stripe" + }, + { + 
"download_count": 7795667, + "project": "pygobject" + }, + { + "download_count": 7793570, + "project": "vsts" + }, + { + "download_count": 7786931, + "project": "azure-mgmt-applicationinsights" + }, + { + "download_count": 7755436, + "project": "azure-cosmosdb-table" + }, + { + "download_count": 7751414, + "project": "zope-event" + }, + { + "download_count": 7745717, + "project": "gspread" + }, + { + "download_count": 7724172, + "project": "phonenumbers" + }, + { + "download_count": 7698105, + "project": "torch" + }, + { + "download_count": 7677484, + "project": "django-debug-toolbar" + }, + { + "download_count": 7669014, + "project": "azure-mgmt-eventhub" + }, + { + "download_count": 7653695, + "project": "sendgrid" + }, + { + "download_count": 7621120, + "project": "azure-core" + }, + { + "download_count": 7618409, + "project": "requests-aws4auth" + }, + { + "download_count": 7606270, + "project": "zope-component" + }, + { + "download_count": 7602809, + "project": "azure-mgmt-marketplaceordering" + }, + { + "download_count": 7589910, + "project": "holidays" + }, + { + "download_count": 7568947, + "project": "azure-cosmosdb-nspkg" + }, + { + "download_count": 7560913, + "project": "azure-mgmt-servicebus" + }, + { + "download_count": 7555791, + "project": "azure-mgmt-loganalytics" + }, + { + "download_count": 7533328, + "project": "azure-mgmt-recoveryservices" + }, + { + "download_count": 7532133, + "project": "azure-mgmt-recoveryservicesbackup" + }, + { + "download_count": 7519987, + "project": "azure-mgmt-eventgrid" + }, + { + "download_count": 7511851, + "project": "simple-salesforce" + }, + { + "download_count": 7493612, + "project": "azure-mgmt-reservations" + }, + { + "download_count": 7490404, + "project": "mysql-python" + }, + { + "download_count": 7471849, + "project": "azure-mgmt-advisor" + }, + { + "download_count": 7470909, + "project": "azure-mgmt-media" + }, + { + "download_count": 7461600, + "project": "backports-tempfile" + }, + { + 
"download_count": 7452831, + "project": "azure-mgmt-msi" + }, + { + "download_count": 7444403, + "project": "azure-mgmt-batchai" + }, + { + "download_count": 7443190, + "project": "azure-mgmt-iothubprovisioningservices" + }, + { + "download_count": 7427082, + "project": "azure-mgmt-search" + }, + { + "download_count": 7426073, + "project": "azure-mgmt-consumption" + }, + { + "download_count": 7421118, + "project": "azure-mgmt-servicefabric" + }, + { + "download_count": 7420661, + "project": "azure-mgmt-billing" + }, + { + "download_count": 7410977, + "project": "semver" + }, + { + "download_count": 7399599, + "project": "w3lib" + }, + { + "download_count": 7377445, + "project": "supervisor" + }, + { + "download_count": 7371140, + "project": "moto" + }, + { + "download_count": 7360517, + "project": "josepy" + }, + { + "download_count": 7359916, + "project": "azure-mgmt-relay" + }, + { + "download_count": 7325634, + "project": "pandas-gbq" + }, + { + "download_count": 7317868, + "project": "acme" + }, + { + "download_count": 7308144, + "project": "azure-servicebus" + }, + { + "download_count": 7271321, + "project": "xlwt" + }, + { + "download_count": 7270699, + "project": "structlog" + }, + { + "download_count": 7268987, + "project": "sphinxcontrib-serializinghtml" + }, + { + "download_count": 7268175, + "project": "sphinxcontrib-htmlhelp" + }, + { + "download_count": 7251725, + "project": "keyring" + }, + { + "download_count": 7251674, + "project": "sphinxcontrib-qthelp" + }, + { + "download_count": 7251256, + "project": "sphinxcontrib-devhelp" + }, + { + "download_count": 7251076, + "project": "sphinxcontrib-applehelp" + }, + { + "download_count": 7250627, + "project": "sphinxcontrib-jsmath" + }, + { + "download_count": 7239285, + "project": "pytest-django" + }, + { + "download_count": 7236146, + "project": "voluptuous" + }, + { + "download_count": 7235602, + "project": "llvmlite" + }, + { + "download_count": 7112734, + "project": "theano" + }, + { + 
"download_count": 7042677, + "project": "numba" + }, + { + "download_count": 7038235, + "project": "shellingham" + }, + { + "download_count": 7023740, + "project": "pydocumentdb" + }, + { + "download_count": 7014759, + "project": "parse" + }, + { + "download_count": 7011858, + "project": "coloredlogs" + }, + { + "download_count": 6991011, + "project": "certbot" + }, + { + "download_count": 6989202, + "project": "google-cloud-vision" + }, + { + "download_count": 6983443, + "project": "influxdb" + }, + { + "download_count": 6981795, + "project": "azure-mgmt-managementgroups" + }, + { + "download_count": 6962527, + "project": "azure-mgmt-datamigration" + }, + { + "download_count": 6935874, + "project": "cheetah" + }, + { + "download_count": 6931267, + "project": "azure-mgmt-policyinsights" + }, + { + "download_count": 6910342, + "project": "python-augeas" + }, + { + "download_count": 6902895, + "project": "tblib" + }, + { + "download_count": 6885492, + "project": "azure-mgmt-iotcentral" + }, + { + "download_count": 6882533, + "project": "azure-mgmt-signalr" + }, + { + "download_count": 6879787, + "project": "instana" + }, + { + "download_count": 6848658, + "project": "uptime" + }, + { + "download_count": 6823328, + "project": "azure-mgmt-maps" + }, + { + "download_count": 6811121, + "project": "coreapi" + }, + { + "download_count": 6805884, + "project": "setproctitle" + }, + { + "download_count": 6803339, + "project": "pymemcache" + }, + { + "download_count": 6790921, + "project": "opt-einsum" + }, + { + "download_count": 6746204, + "project": "coreschema" + }, + { + "download_count": 6733204, + "project": "dicttoxml" + }, + { + "download_count": 6709540, + "project": "python-mimeparse" + }, + { + "download_count": 6686487, + "project": "letsencrypt" + }, + { + "download_count": 6671209, + "project": "pypdf2" + }, + { + "download_count": 6659143, + "project": "certbot-apache" + }, + { + "download_count": 6650051, + "project": "feedparser" + }, + { + "download_count": 
6629341, + "project": "itypes" + }, + { + "download_count": 6607528, + "project": "datetime" + }, + { + "download_count": 6595896, + "project": "pyglet" + }, + { + "download_count": 6565703, + "project": "pywin32" + }, + { + "download_count": 6555587, + "project": "cachecontrol" + }, + { + "download_count": 6537738, + "project": "whichcraft" + }, + { + "download_count": 6493687, + "project": "repoze-lru" + }, + { + "download_count": 6483589, + "project": "opentracing" + }, + { + "download_count": 6471332, + "project": "yapf" + }, + { + "download_count": 6470521, + "project": "reportlab" + }, + { + "download_count": 6454108, + "project": "pyperclip" + }, + { + "download_count": 6427226, + "project": "sasl" + }, + { + "download_count": 6416154, + "project": "pydocstyle" + }, + { + "download_count": 6412179, + "project": "ldap3" + }, + { + "download_count": 6364528, + "project": "python-http-client" + }, + { + "download_count": 6363103, + "project": "pycountry" + }, + { + "download_count": 6348755, + "project": "azure-servicemanagement-legacy" + }, + { + "download_count": 6348419, + "project": "certbot-nginx" + }, + { + "download_count": 6347386, + "project": "python-gnupg" + }, + { + "download_count": 6338642, + "project": "suds-jurko" + }, + { + "download_count": 6325028, + "project": "promise" + }, + { + "download_count": 6321828, + "project": "twine" + }, + { + "download_count": 6310843, + "project": "django-redis" + }, + { + "download_count": 6310630, + "project": "redis-py-cluster" + }, + { + "download_count": 6301931, + "project": "mysql-connector" + }, + { + "download_count": 6295377, + "project": "python-jenkins" + }, + { + "download_count": 6275920, + "project": "azure-servicefabric" + }, + { + "download_count": 6251258, + "project": "expiringdict" + }, + { + "download_count": 6237744, + "project": "pyvcf" + }, + { + "download_count": 6217846, + "project": "watchtower" + }, + { + "download_count": 6191358, + "project": "poyo" + }, + { + "download_count": 
6177944, + "project": "html2text" + }, + { + "download_count": 6167605, + "project": "binaryornot" + }, + { + "download_count": 6156388, + "project": "azure-mgmt" + }, + { + "download_count": 6141630, + "project": "bokeh" + }, + { + "download_count": 6124335, + "project": "python3-openid" + }, + { + "download_count": 6124110, + "project": "azure-storage-file" + }, + { + "download_count": 6123086, + "project": "oscrypto" + }, + { + "download_count": 6089609, + "project": "kazoo" + }, + { + "download_count": 6087309, + "project": "cookiecutter" + }, + { + "download_count": 6069231, + "project": "jinja2-time" + }, + { + "download_count": 6060397, + "project": "azure" + }, + { + "download_count": 6048114, + "project": "google-cloud-translate" + }, + { + "download_count": 6041366, + "project": "humanize" + }, + { + "download_count": 6039221, + "project": "numexpr" + }, + { + "download_count": 6020894, + "project": "twilio" + }, + { + "download_count": 6012401, + "project": "cerberus" + }, + { + "download_count": 6012147, + "project": "azure-mgmt-logic" + }, + { + "download_count": 6006198, + "project": "google-cloud-language" + }, + { + "download_count": 6003966, + "project": "nodeenv" + }, + { + "download_count": 5973514, + "project": "azure-mgmt-scheduler" + }, + { + "download_count": 5943411, + "project": "backports-csv" + }, + { + "download_count": 5918171, + "project": "multi-key-dict" + }, + { + "download_count": 5880962, + "project": "python-memcached" + }, + { + "download_count": 5873333, + "project": "srsly" + }, + { + "download_count": 5867465, + "project": "cx-oracle" + }, + { + "download_count": 5859924, + "project": "blis" + }, + { + "download_count": 5855262, + "project": "azure-mgmt-datafactory" + }, + { + "download_count": 5829317, + "project": "identify" + }, + { + "download_count": 5817248, + "project": "pydata-google-auth" + }, + { + "download_count": 5816751, + "project": "parsel" + }, + { + "download_count": 5808925, + "project": "setuptools-scm" + 
}, + { + "download_count": 5798570, + "project": "confluent-kafka" + }, + { + "download_count": 5780362, + "project": "lunardate" + }, + { + "download_count": 5770962, + "project": "eventlet" + }, + { + "download_count": 5764369, + "project": "webtest" + }, + { + "download_count": 5762114, + "project": "sqlalchemy-utils" + }, + { + "download_count": 5748385, + "project": "pre-commit" + }, + { + "download_count": 5744591, + "project": "flask-restplus" + }, + { + "download_count": 5741800, + "project": "google-cloud-error-reporting" + }, + { + "download_count": 5727692, + "project": "gapic-google-cloud-datastore-v1" + }, + { + "download_count": 5726258, + "project": "google-cloud-speech" + }, + { + "download_count": 5696390, + "project": "tensorflow-gpu" + }, + { + "download_count": 5671626, + "project": "youtube-dl" + }, + { + "download_count": 5669862, + "project": "zope-proxy" + }, + { + "download_count": 5668657, + "project": "zope-hookable" + }, + { + "download_count": 5666674, + "project": "aspy-yaml" + }, + { + "download_count": 5665846, + "project": "pystan" + }, + { + "download_count": 5658876, + "project": "meld3" + }, + { + "download_count": 5657136, + "project": "zope-deferredimport" + }, + { + "download_count": 5646525, + "project": "altgraph" + }, + { + "download_count": 5638012, + "project": "yamllint" + }, + { + "download_count": 5627465, + "project": "pydispatcher" + }, + { + "download_count": 5598597, + "project": "pytest-html" + }, + { + "download_count": 5589472, + "project": "queuelib" + }, + { + "download_count": 5580580, + "project": "mpmath" + }, + { + "download_count": 5556096, + "project": "wasabi" + }, + { + "download_count": 5538810, + "project": "dateparser" + }, + { + "download_count": 5522745, + "project": "azure-mgmt-subscription" + }, + { + "download_count": 5500243, + "project": "flask-migrate" + }, + { + "download_count": 5494861, + "project": "cfgv" + }, + { + "download_count": 5490908, + "project": "azure-mgmt-notificationhubs" + 
}, + { + "download_count": 5479229, + "project": "azure-mgmt-managementpartner" + }, + { + "download_count": 5477766, + "project": "azure-mgmt-powerbiembedded" + }, + { + "download_count": 5471458, + "project": "azure-eventgrid" + }, + { + "download_count": 5469115, + "project": "azure-mgmt-commerce" + }, + { + "download_count": 5465959, + "project": "azure-mgmt-machinelearningcompute" + }, + { + "download_count": 5462201, + "project": "readme-renderer" + }, + { + "download_count": 5461957, + "project": "azure-mgmt-hanaonazure" + }, + { + "download_count": 5447652, + "project": "rfc3986" + }, + { + "download_count": 5440586, + "project": "scrapy" + }, + { + "download_count": 5434695, + "project": "aenum" + }, + { + "download_count": 5420091, + "project": "anyjson" + }, + { + "download_count": 5407106, + "project": "proto-google-cloud-datastore-v1" + }, + { + "download_count": 5387258, + "project": "sympy" + }, + { + "download_count": 5374203, + "project": "pygithub" + }, + { + "download_count": 5373585, + "project": "pytest-metadata" + }, + { + "download_count": 5340852, + "project": "paho-mqtt" + }, + { + "download_count": 5335035, + "project": "multiprocess" + }, + { + "download_count": 5333251, + "project": "googledatastore" + }, + { + "download_count": 5328607, + "project": "phoenixdb" + }, + { + "download_count": 5322559, + "project": "nose-exclude" + }, + { + "download_count": 5309246, + "project": "importlib-resources" + }, + { + "download_count": 5299450, + "project": "cookies" + }, + { + "download_count": 5277019, + "project": "tensorflow-tensorboard" + }, + { + "download_count": 5255084, + "project": "thrift-sasl" + }, + { + "download_count": 5249244, + "project": "jsonpath-rw" + }, + { + "download_count": 5245636, + "project": "oslo-i18n" + }, + { + "download_count": 5245466, + "project": "s2sphere" + }, + { + "download_count": 5245010, + "project": "whitenoise" + }, + { + "download_count": 5236181, + "project": "google-cloud-dns" + }, + { + 
"download_count": 5223390, + "project": "aws-sam-translator" + }, + { + "download_count": 5213027, + "project": "slacker" + }, + { + "download_count": 5165706, + "project": "hypothesis" + }, + { + "download_count": 5155283, + "project": "google-cloud-resource-manager" + }, + { + "download_count": 5152438, + "project": "debtcollector" + }, + { + "download_count": 5141790, + "project": "ruamel-ordereddict" + }, + { + "download_count": 5136659, + "project": "azure-loganalytics" + }, + { + "download_count": 5089358, + "project": "rx" + }, + { + "download_count": 5083806, + "project": "discord" + }, + { + "download_count": 5082337, + "project": "click-plugins" + }, + { + "download_count": 5069136, + "project": "google-cloud-videointelligence" + }, + { + "download_count": 5067821, + "project": "google-cloud-runtimeconfig" + }, + { + "download_count": 5043933, + "project": "inflect" + }, + { + "download_count": 5006490, + "project": "pulp" + }, + { + "download_count": 5001567, + "project": "oslo-utils" + }, + { + "download_count": 4965630, + "project": "azure-mgmt-devspaces" + }, + { + "download_count": 4949806, + "project": "stringcase" + }, + { + "download_count": 4926195, + "project": "django-appconf" + }, + { + "download_count": 4913373, + "project": "pynamodb" + }, + { + "download_count": 4913090, + "project": "dogpile-cache" + }, + { + "download_count": 4899768, + "project": "python-consul" + }, + { + "download_count": 4896198, + "project": "milksnake" + }, + { + "download_count": 4875874, + "project": "pypng" + }, + { + "download_count": 4868256, + "project": "oslo-config" + }, + { + "download_count": 4857940, + "project": "haversine" + }, + { + "download_count": 4854545, + "project": "azure-applicationinsights" + }, + { + "download_count": 4830085, + "project": "flower" + }, + { + "download_count": 4787508, + "project": "bandit" + }, + { + "download_count": 4766743, + "project": "strict-rfc3339" + }, + { + "download_count": 4744246, + "project": "findspark" + }, + 
{ + "download_count": 4742234, + "project": "flask-admin" + }, + { + "download_count": 4742026, + "project": "qds-sdk" + }, + { + "download_count": 4735803, + "project": "pip-tools" + }, + { + "download_count": 4701984, + "project": "cliff" + }, + { + "download_count": 4701803, + "project": "ddtrace" + }, + { + "download_count": 4693878, + "project": "progressbar2" + }, + { + "download_count": 4652633, + "project": "python-utils" + }, + { + "download_count": 4645712, + "project": "cairocffi" + }, + { + "download_count": 4645547, + "project": "google-cloud-trace" + }, + { + "download_count": 4636704, + "project": "docker-py" + }, + { + "download_count": 4632853, + "project": "tinycss2" + }, + { + "download_count": 4627762, + "project": "apscheduler" + }, + { + "download_count": 4606642, + "project": "python-pam" + }, + { + "download_count": 4606137, + "project": "grpcio-gcp" + }, + { + "download_count": 4605186, + "project": "parse-type" + }, + { + "download_count": 4601072, + "project": "parameterized" + }, + { + "download_count": 4600206, + "project": "avro-python3" + }, + { + "download_count": 4589906, + "project": "pypiwin32" + }, + { + "download_count": 4587705, + "project": "olefile" + }, + { + "download_count": 4586230, + "project": "testtools" + }, + { + "download_count": 4583482, + "project": "dj-database-url" + }, + { + "download_count": 4572193, + "project": "basictracer" + }, + { + "download_count": 4567533, + "project": "macholib" + }, + { + "download_count": 4563623, + "project": "cligj" + }, + { + "download_count": 4560977, + "project": "google-cloud-container" + }, + { + "download_count": 4553683, + "project": "oslo-serialization" + }, + { + "download_count": 4544031, + "project": "logging" + }, + { + "download_count": 4543347, + "project": "click-completion" + }, + { + "download_count": 4542581, + "project": "pycares" + }, + { + "download_count": 4461143, + "project": "fiona" + }, + { + "download_count": 4454845, + "project": "mmh3" + }, + { + 
"download_count": 4447608, + "project": "jws" + }, + { + "download_count": 4433310, + "project": "python-docx" + }, + { + "download_count": 4432803, + "project": "mleap" + }, + { + "download_count": 4430881, + "project": "extras" + }, + { + "download_count": 4394588, + "project": "dataclasses" + }, + { + "download_count": 4384805, + "project": "fixtures" + }, + { + "download_count": 4368983, + "project": "cfn-lint" + }, + { + "download_count": 4347507, + "project": "cairosvg" + }, + { + "download_count": 4345671, + "project": "lz4" + }, + { + "download_count": 4341286, + "project": "flask-script" + }, + { + "download_count": 4335840, + "project": "statistics" + }, + { + "download_count": 4332342, + "project": "fbprophet" + }, + { + "download_count": 4329185, + "project": "cmd2" + }, + { + "download_count": 4323965, + "project": "brotli" + }, + { + "download_count": 4323647, + "project": "cytoolz" + }, + { + "download_count": 4315817, + "project": "polyaxon-client" + }, + { + "download_count": 4309639, + "project": "portalocker" + }, + { + "download_count": 4302427, + "project": "torchvision" + }, + { + "download_count": 4299923, + "project": "bumpversion" + }, + { + "download_count": 4291946, + "project": "python-jwt" + }, + { + "download_count": 4264873, + "project": "polyaxon-cli" + }, + { + "download_count": 4263296, + "project": "polyaxon-deploy" + }, + { + "download_count": 4260496, + "project": "coveralls" + }, + { + "download_count": 4256821, + "project": "python-geohash" + }, + { + "download_count": 4247442, + "project": "flask-caching" + }, + { + "download_count": 4223430, + "project": "cssselect2" + }, + { + "download_count": 4217166, + "project": "behave" + }, + { + "download_count": 4198998, + "project": "mozfile" + }, + { + "download_count": 4198846, + "project": "ddt" + }, + { + "download_count": 4192314, + "project": "aiodns" + }, + { + "download_count": 4180658, + "project": "googleads" + }, + { + "download_count": 4151629, + "project": 
"flake8-polyfill" + }, + { + "download_count": 4142826, + "project": "pyphen" + }, + { + "download_count": 4130090, + "project": "fastparquet" + }, + { + "download_count": 4125828, + "project": "flask-babel" + }, + { + "download_count": 4114954, + "project": "gcloud" + }, + { + "download_count": 4098408, + "project": "google-cloud-bigquery-datatransfer" + }, + { + "download_count": 4088308, + "project": "gorilla" + }, + { + "download_count": 4081407, + "project": "keystoneauth1" + }, + { + "download_count": 4077553, + "project": "requests-futures" + }, + { + "download_count": 4054249, + "project": "azureml-core" + }, + { + "download_count": 4042252, + "project": "python-ldap" + }, + { + "download_count": 4007776, + "project": "pathos" + }, + { + "download_count": 3999757, + "project": "ephem" + }, + { + "download_count": 3969692, + "project": "hyperopt" + }, + { + "download_count": 3949966, + "project": "testfixtures" + }, + { + "download_count": 3937830, + "project": "fonttools" + }, + { + "download_count": 3935226, + "project": "terminaltables" + }, + { + "download_count": 3927254, + "project": "easyprocess" + }, + { + "download_count": 3922990, + "project": "python-gflags" + }, + { + "download_count": 3912801, + "project": "deprecation" + }, + { + "download_count": 3905705, + "project": "nvidia-ml-py" + }, + { + "download_count": 3885807, + "project": "google-cloud-kms" + }, + { + "download_count": 3865843, + "project": "geojson" + }, + { + "download_count": 3828132, + "project": "robotframework" + }, + { + "download_count": 3820453, + "project": "gcsfs" + }, + { + "download_count": 3810489, + "project": "convertdate" + }, + { + "download_count": 3809802, + "project": "sockjs-tornado" + }, + { + "download_count": 3799689, + "project": "multipledispatch" + }, + { + "download_count": 3798810, + "project": "weasyprint" + }, + { + "download_count": 3793665, + "project": "tomlkit" + }, + { + "download_count": 3792308, + "project": "python-snappy" + }, + { + 
"download_count": 3787259, + "project": "django-model-utils" + }, + { + "download_count": 3780397, + "project": "distributed" + }, + { + "download_count": 3775038, + "project": "grequests" + }, + { + "download_count": 3771741, + "project": "flask-bcrypt" + }, + { + "download_count": 3769931, + "project": "fakeredis" + }, + { + "download_count": 3752939, + "project": "schedule" + }, + { + "download_count": 3746896, + "project": "validators" + }, + { + "download_count": 3721493, + "project": "knack" + }, + { + "download_count": 3693854, + "project": "pox" + }, + { + "download_count": 3682964, + "project": "sshtunnel" + }, + { + "download_count": 3681065, + "project": "tftpy" + }, + { + "download_count": 3676291, + "project": "pdfminer" + }, + { + "download_count": 3664933, + "project": "google-compute-engine" + }, + { + "download_count": 3647507, + "project": "graphene" + }, + { + "download_count": 3639253, + "project": "setuptools-git" + }, + { + "download_count": 3630380, + "project": "unittest-xml-reporting" + }, + { + "download_count": 3627156, + "project": "ciso8601" + }, + { + "download_count": 3627033, + "project": "sockjs" + }, + { + "download_count": 3625069, + "project": "shortuuid" + }, + { + "download_count": 3616592, + "project": "ray" + }, + { + "download_count": 3613699, + "project": "ppft" + }, + { + "download_count": 3597147, + "project": "shap" + }, + { + "download_count": 3590917, + "project": "azureml-model-management-sdk" + }, + { + "download_count": 3588391, + "project": "pygsheets" + }, + { + "download_count": 3584999, + "project": "flask-swagger" + }, + { + "download_count": 3575551, + "project": "cssutils" + }, + { + "download_count": 3568283, + "project": "pattern" + }, + { + "download_count": 3549188, + "project": "pylev" + }, + { + "download_count": 3544798, + "project": "ibm-db-sa" + }, + { + "download_count": 3526181, + "project": "pyathenajdbc" + }, + { + "download_count": 3518011, + "project": "pylint-plugin-utils" + }, + { + 
"download_count": 3517988, + "project": "pg8000" + }, + { + "download_count": 3517712, + "project": "tensorflow-model-analysis" + }, + { + "download_count": 3507991, + "project": "os-service-types" + }, + { + "download_count": 3489788, + "project": "python-swiftclient" + }, + { + "download_count": 3477450, + "project": "openstacksdk" + }, + { + "download_count": 3465240, + "project": "cfn-flip" + }, + { + "download_count": 3459223, + "project": "catkin-pkg" + }, + { + "download_count": 3455963, + "project": "cleo" + }, + { + "download_count": 3448945, + "project": "python-keystoneclient" + }, + { + "download_count": 3448335, + "project": "jellyfish" + }, + { + "download_count": 3444950, + "project": "apispec" + }, + { + "download_count": 3443490, + "project": "pastel" + }, + { + "download_count": 3434078, + "project": "django-tables2" + }, + { + "download_count": 3429540, + "project": "qrcode" + }, + { + "download_count": 3426160, + "project": "collectd-nvidianvml" + }, + { + "download_count": 3420045, + "project": "apache-airflow" + }, + { + "download_count": 3411604, + "project": "prison" + }, + { + "download_count": 3402478, + "project": "pefile" + }, + { + "download_count": 3393690, + "project": "commonmark" + }, + { + "download_count": 3388484, + "project": "tablib" + }, + { + "download_count": 3384168, + "project": "ntlm-auth" + }, + { + "download_count": 3377675, + "project": "geopandas" + }, + { + "download_count": 3366350, + "project": "jsmin" + }, + { + "download_count": 3361635, + "project": "antlr4-python3-runtime" + }, + { + "download_count": 3340033, + "project": "polyaxon-dockerizer" + }, + { + "download_count": 3293582, + "project": "odfpy" + }, + { + "download_count": 3269264, + "project": "openapi-codec" + }, + { + "download_count": 3258675, + "project": "utm" + }, + { + "download_count": 3251855, + "project": "pyvmomi" + }, + { + "download_count": 3251588, + "project": "poetry" + }, + { + "download_count": 3247520, + "project": "bitarray" + }, + 
{ + "download_count": 3244587, + "project": "python-crontab" + }, + { + "download_count": 3243979, + "project": "django-mysql" + }, + { + "download_count": 3242901, + "project": "databricks-pypi1" + }, + { + "download_count": 3238235, + "project": "marshmallow-sqlalchemy" + }, + { + "download_count": 3226761, + "project": "emoji" + }, + { + "download_count": 3224704, + "project": "initools" + }, + { + "download_count": 3209542, + "project": "capstone" + }, + { + "download_count": 3200795, + "project": "djangorestframework-jwt" + }, + { + "download_count": 3184641, + "project": "django-rest-swagger" + }, + { + "download_count": 3181604, + "project": "tensorflow-hub" + }, + { + "download_count": 3179141, + "project": "ratelimit" + }, + { + "download_count": 3176283, + "project": "asyncio" + }, + { + "download_count": 3176119, + "project": "spark-sklearn" + }, + { + "download_count": 3173008, + "project": "paste" + }, + { + "download_count": 3169917, + "project": "pytest-asyncio" + }, + { + "download_count": 3159532, + "project": "django-crispy-forms" + }, + { + "download_count": 3156134, + "project": "cachy" + }, + { + "download_count": 3150001, + "project": "asgiref" + }, + { + "download_count": 3138323, + "project": "django-environ" + }, + { + "download_count": 3127100, + "project": "fire" + }, + { + "download_count": 3123851, + "project": "salesforce-bulk" + }, + { + "download_count": 3117730, + "project": "lightstep" + }, + { + "download_count": 3116358, + "project": "azure-cli-core" + }, + { + "download_count": 3110959, + "project": "recommonmark" + }, + { + "download_count": 3095813, + "project": "pysqlite" + }, + { + "download_count": 3088484, + "project": "clickclick" + }, + { + "download_count": 3077942, + "project": "heapdict" + }, + { + "download_count": 3077928, + "project": "google-cloud-dataflow" + }, + { + "download_count": 3073863, + "project": "spotinst-agent" + }, + { + "download_count": 3073217, + "project": "analytics-python" + }, + { + 
"download_count": 3065872, + "project": "nose-timer" + }, + { + "download_count": 3064209, + "project": "rq" + }, + { + "download_count": 3062467, + "project": "wandb" + }, + { + "download_count": 3060966, + "project": "jsonfield" + }, + { + "download_count": 3050206, + "project": "pyinotify" + }, + { + "download_count": 3048455, + "project": "pygame" + }, + { + "download_count": 3043542, + "project": "intel-openmp" + }, + { + "download_count": 3042574, + "project": "zict" + }, + { + "download_count": 3040916, + "project": "pytest-split-tests" + }, + { + "download_count": 3036872, + "project": "pep8-naming" + }, + { + "download_count": 3029439, + "project": "ordered-set" + }, + { + "download_count": 3025549, + "project": "graphql-relay" + }, + { + "download_count": 3019093, + "project": "troposphere" + }, + { + "download_count": 3009250, + "project": "azure-kusto-data" + }, + { + "download_count": 3008025, + "project": "opencv-contrib-python" + }, + { + "download_count": 3003750, + "project": "requests-ntlm" + }, + { + "download_count": 3003003, + "project": "tb-nightly" + }, + { + "download_count": 2996766, + "project": "credstash" + }, + { + "download_count": 2989520, + "project": "flask-appbuilder" + }, + { + "download_count": 2980537, + "project": "plumbum" + }, + { + "download_count": 2973597, + "project": "pager" + }, + { + "download_count": 2967237, + "project": "schema" + }, + { + "download_count": 2965535, + "project": "mkl" + }, + { + "download_count": 2963377, + "project": "blessed" + }, + { + "download_count": 2953182, + "project": "datashape" + }, + { + "download_count": 2941855, + "project": "validate-email" + }, + { + "download_count": 2939744, + "project": "pylint-django" + }, + { + "download_count": 2938945, + "project": "webapp2" + }, + { + "download_count": 2936891, + "project": "livereload" + }, + { + "download_count": 2935073, + "project": "cvxopt" + }, + { + "download_count": 2934589, + "project": "cement" + }, + { + "download_count": 2931314, 
+ "project": "tfx-bsl" + }, + { + "download_count": 2922270, + "project": "rospkg" + }, + { + "download_count": 2912677, + "project": "flaky" + }, + { + "download_count": 2909121, + "project": "filemagic" + }, + { + "download_count": 2902933, + "project": "msgpack-numpy" + }, + { + "download_count": 2895921, + "project": "uamqp" + }, + { + "download_count": 2895636, + "project": "accumulation-tree" + }, + { + "download_count": 2894366, + "project": "pyudorandom" + }, + { + "download_count": 2892673, + "project": "tdigest" + }, + { + "download_count": 2888615, + "project": "tensorflow-data-validation" + }, + { + "download_count": 2886531, + "project": "python-subunit" + }, + { + "download_count": 2878388, + "project": "gitdb" + }, + { + "download_count": 2874189, + "project": "python-novaclient" + }, + { + "download_count": 2857065, + "project": "asyncpg" + }, + { + "download_count": 2847295, + "project": "social-auth-core" + }, + { + "download_count": 2838600, + "project": "azure-cli-nspkg" + }, + { + "download_count": 2838428, + "project": "requestsexceptions" + }, + { + "download_count": 2834024, + "project": "filechunkio" + }, + { + "download_count": 2828975, + "project": "argon2-cffi" + }, + { + "download_count": 2822266, + "project": "beautifulsoup" + }, + { + "download_count": 2821979, + "project": "smmap" + }, + { + "download_count": 2819754, + "project": "django-multiselectfield" + }, + { + "download_count": 2815640, + "project": "drf-yasg" + }, + { + "download_count": 2813694, + "project": "boltons" + }, + { + "download_count": 2810269, + "project": "httpretty" + }, + { + "download_count": 2806190, + "project": "pyqt5" + }, + { + "download_count": 2802770, + "project": "hashids" + }, + { + "download_count": 2792830, + "project": "pdfrw" + }, + { + "download_count": 2792334, + "project": "flask-openid" + }, + { + "download_count": 2791834, + "project": "gapic-google-cloud-error-reporting-v1beta1" + }, + { + "download_count": 2790983, + "project": 
"cookiejar" + }, + { + "download_count": 2788259, + "project": "proto-google-cloud-error-reporting-v1beta1" + }, + { + "download_count": 2779755, + "project": "flask-marshmallow" + }, + { + "download_count": 2753420, + "project": "pyinstaller" + }, + { + "download_count": 2752867, + "project": "sqlalchemy-redshift" + }, + { + "download_count": 2749279, + "project": "python-logstash" + }, + { + "download_count": 2747409, + "project": "django-nose" + }, + { + "download_count": 2744486, + "project": "azure-cosmos" + }, + { + "download_count": 2738853, + "project": "verboselogs" + }, + { + "download_count": 2724920, + "project": "googlemaps" + }, + { + "download_count": 2722861, + "project": "social-auth-app-django" + }, + { + "download_count": 2706844, + "project": "async-generator" + }, + { + "download_count": 2704711, + "project": "funcy" + }, + { + "download_count": 2703274, + "project": "clint" + }, + { + "download_count": 2701212, + "project": "pytest-sugar" + }, + { + "download_count": 2699840, + "project": "django-timezone-field" + }, + { + "download_count": 2697450, + "project": "jaydebeapi" + }, + { + "download_count": 2693049, + "project": "brotlipy" + }, + { + "download_count": 2686973, + "project": "args" + }, + { + "download_count": 2683870, + "project": "vcrpy" + }, + { + "download_count": 2677855, + "project": "marshmallow-enum" + }, + { + "download_count": 2673327, + "project": "peewee" + }, + { + "download_count": 2670889, + "project": "osc-lib" + }, + { + "download_count": 2670484, + "project": "langdetect" + }, + { + "download_count": 2663228, + "project": "enum" + }, + { + "download_count": 2655265, + "project": "azure-cli-telemetry" + }, + { + "download_count": 2651881, + "project": "tables" + }, + { + "download_count": 2649758, + "project": "pastedeploy" + }, + { + "download_count": 2646163, + "project": "swagger-spec-validator" + }, + { + "download_count": 2644724, + "project": "tld" + }, + { + "download_count": 2642975, + "project": "kafka" + 
}, + { + "download_count": 2641270, + "project": "cchardet" + }, + { + "download_count": 2636532, + "project": "timezonefinder" + }, + { + "download_count": 2634114, + "project": "mongoengine" + }, + { + "download_count": 2615568, + "project": "python-crfsuite" + }, + { + "download_count": 2600491, + "project": "timeout-decorator" + }, + { + "download_count": 2592520, + "project": "rjsmin" + }, + { + "download_count": 2589546, + "project": "brunel" + }, + { + "download_count": 2585708, + "project": "autobahn" + }, + { + "download_count": 2584709, + "project": "webargs" + }, + { + "download_count": 2584111, + "project": "pyvirtualdisplay" + }, + { + "download_count": 2580140, + "project": "descartes" + }, + { + "download_count": 2551557, + "project": "cassandra-driver" + }, + { + "download_count": 2549257, + "project": "aws-requests-auth" + }, + { + "download_count": 2540875, + "project": "rope" + }, + { + "download_count": 2538617, + "project": "aiofiles" + }, + { + "download_count": 2532557, + "project": "pycountry-convert" + }, + { + "download_count": 2528277, + "project": "branca" + }, + { + "download_count": 2524264, + "project": "mechanize" + }, + { + "download_count": 2519234, + "project": "mysql-connector-python-rf" + }, + { + "download_count": 2517497, + "project": "pywebhdfs" + }, + { + "download_count": 2503645, + "project": "folium" + }, + { + "download_count": 2498263, + "project": "aiohttp-cors" + }, + { + "download_count": 2497590, + "project": "flask-httpauth" + }, + { + "download_count": 2495242, + "project": "django-ipware" + }, + { + "download_count": 2494397, + "project": "jupyterlab" + }, + { + "download_count": 2493673, + "project": "pybind11" + }, + { + "download_count": 2492477, + "project": "diff-match-patch" + }, + { + "download_count": 2491248, + "project": "jupyter-pip" + }, + { + "download_count": 2488659, + "project": "dpath" + }, + { + "download_count": 2488591, + "project": "marionette-driver" + }, + { + "download_count": 2484149, + 
"project": "dotnetcore2" + }, + { + "download_count": 2478052, + "project": "pythonwhois" + }, + { + "download_count": 2470002, + "project": "google-cloud-dataproc" + }, + { + "download_count": 2458163, + "project": "enum-compat" + }, + { + "download_count": 2455272, + "project": "awsebcli" + }, + { + "download_count": 2454145, + "project": "django-celery-beat" + }, + { + "download_count": 2453795, + "project": "rfc3987" + }, + { + "download_count": 2447431, + "project": "py-bcrypt" + }, + { + "download_count": 2442569, + "project": "python-gitlab" + }, + { + "download_count": 2439713, + "project": "translationstring" + }, + { + "download_count": 2439355, + "project": "yq" + }, + { + "download_count": 2435098, + "project": "pysnmp" + }, + { + "download_count": 2432521, + "project": "first" + }, + { + "download_count": 2429585, + "project": "hpack" + }, + { + "download_count": 2428283, + "project": "python-glanceclient" + }, + { + "download_count": 2422100, + "project": "venusian" + }, + { + "download_count": 2416591, + "project": "bitstring" + }, + { + "download_count": 2408841, + "project": "flake8-docstrings" + }, + { + "download_count": 2407495, + "project": "attrdict" + }, + { + "download_count": 2404932, + "project": "ws4py" + }, + { + "download_count": 2402857, + "project": "os-client-config" + }, + { + "download_count": 2401078, + "project": "locustio" + }, + { + "download_count": 2398281, + "project": "junit-xml" + }, + { + "download_count": 2395343, + "project": "mozversion" + }, + { + "download_count": 2395052, + "project": "azureml-dataprep" + }, + { + "download_count": 2390036, + "project": "sshpubkeys" + }, + { + "download_count": 2387469, + "project": "h2" + }, + { + "download_count": 2386629, + "project": "ansible-lint" + }, + { + "download_count": 2381639, + "project": "txaio" + }, + { + "download_count": 2380783, + "project": "wget" + }, + { + "download_count": 2375129, + "project": "pytest-rerunfailures" + }, + { + "download_count": 2371842, + 
"project": "oslo-log" + }, + { + "download_count": 2370221, + "project": "hyperframe" + }, + { + "download_count": 2364172, + "project": "python-openid" + }, + { + "download_count": 2357263, + "project": "flask-jwt-extended" + }, + { + "download_count": 2354920, + "project": "azureml-dataprep-native" + }, + { + "download_count": 2346411, + "project": "flake8-import-order" + }, + { + "download_count": 2334525, + "project": "pypandoc" + }, + { + "download_count": 2329461, + "project": "pysmi" + }, + { + "download_count": 2328121, + "project": "json-merge-patch" + }, + { + "download_count": 2325050, + "project": "falcon" + }, + { + "download_count": 2314962, + "project": "google-cloud-automl" + }, + { + "download_count": 2313548, + "project": "azure-kusto-ingest" + }, + { + "download_count": 2311574, + "project": "aioredis" + }, + { + "download_count": 2307595, + "project": "py-cpuinfo" + }, + { + "download_count": 2305070, + "project": "imbalanced-learn" + }, + { + "download_count": 2304296, + "project": "django-compressor" + }, + { + "download_count": 2304263, + "project": "memoized-property" + }, + { + "download_count": 2304114, + "project": "azureml-telemetry" + }, + { + "download_count": 2301461, + "project": "textblob" + }, + { + "download_count": 2299510, + "project": "snowflake-sqlalchemy" + }, + { + "download_count": 2287102, + "project": "schematics" + }, + { + "download_count": 2276329, + "project": "virtualenvwrapper" + }, + { + "download_count": 2272329, + "project": "aws-encryption-sdk" + }, + { + "download_count": 2272227, + "project": "opencensus" + }, + { + "download_count": 2267894, + "project": "django-allauth" + }, + { + "download_count": 2267072, + "project": "ibm-db" + }, + { + "download_count": 2258528, + "project": "python-cinderclient" + }, + { + "download_count": 2252312, + "project": "objectpath" + }, + { + "download_count": 2242218, + "project": "tf-estimator-nightly" + }, + { + "download_count": 2231619, + "project": "flask-compress" + }, 
+ { + "download_count": 2224267, + "project": "azureml-pipeline-core" + }, + { + "download_count": 2221757, + "project": "connexion" + }, + { + "download_count": 2219740, + "project": "django-phonenumber-field" + }, + { + "download_count": 2214496, + "project": "warlock" + }, + { + "download_count": 2213923, + "project": "pyqt5-sip" + }, + { + "download_count": 2210221, + "project": "phonenumberslite" + }, + { + "download_count": 2209512, + "project": "oslo-context" + }, + { + "download_count": 2194021, + "project": "azure-cli-command-modules-nspkg" + }, + { + "download_count": 2185051, + "project": "pathlib-mate" + }, + { + "download_count": 2184347, + "project": "jsonref" + }, + { + "download_count": 2182555, + "project": "pytimeparse" + }, + { + "download_count": 2180696, + "project": "databricks-pypi2" + }, + { + "download_count": 2178821, + "project": "natsort" + }, + { + "download_count": 2176243, + "project": "ipaddr" + }, + { + "download_count": 2171374, + "project": "path-py" + }, + { + "download_count": 2170378, + "project": "azure-mgmt-hdinsight" + }, + { + "download_count": 2153590, + "project": "firebase-admin" + }, + { + "download_count": 2150903, + "project": "azureml-train-core" + }, + { + "download_count": 2148663, + "project": "pypyodbc" + }, + { + "download_count": 2145885, + "project": "uszipcode" + }, + { + "download_count": 2145383, + "project": "azureml-train-restclients-hyperdrive" + }, + { + "download_count": 2142865, + "project": "premailer" + }, + { + "download_count": 2137325, + "project": "h11" + }, + { + "download_count": 2132743, + "project": "pyformance" + }, + { + "download_count": 2132535, + "project": "shellescape" + }, + { + "download_count": 2130341, + "project": "django-import-export" + }, + { + "download_count": 2127667, + "project": "wsaccel" + }, + { + "download_count": 2126611, + "project": "django-js-asset" + }, + { + "download_count": 2126191, + "project": "snakebite" + }, + { + "download_count": 2124659, + "project": 
"wordcloud" + }, + { + "download_count": 2109163, + "project": "antlr4-python2-runtime" + }, + { + "download_count": 2099008, + "project": "naked" + }, + { + "download_count": 2098854, + "project": "jinja2-cli" + }, + { + "download_count": 2097764, + "project": "onnx" + }, + { + "download_count": 2081320, + "project": "pytesseract" + }, + { + "download_count": 2076961, + "project": "azureml-pipeline-steps" + }, + { + "download_count": 2073133, + "project": "flask-testing" + }, + { + "download_count": 2072907, + "project": "pytest-env" + }, + { + "download_count": 2072150, + "project": "django-widget-tweaks" + }, + { + "download_count": 2070728, + "project": "django-webpack-loader" + }, + { + "download_count": 2069730, + "project": "azureml-pipeline" + }, + { + "download_count": 2069241, + "project": "mrjob" + }, + { + "download_count": 2055974, + "project": "public" + }, + { + "download_count": 2053631, + "project": "python-whois" + }, + { + "download_count": 2052521, + "project": "safety" + }, + { + "download_count": 2038912, + "project": "azure-multiapi-storage" + }, + { + "download_count": 2038114, + "project": "google-cloud-tasks" + }, + { + "download_count": 2037912, + "project": "partd" + }, + { + "download_count": 2033573, + "project": "rcssmin" + }, + { + "download_count": 2032537, + "project": "uuid" + }, + { + "download_count": 2030463, + "project": "azureml-train" + }, + { + "download_count": 2028467, + "project": "vsts-cd-manager" + }, + { + "download_count": 2025661, + "project": "pyjks" + }, + { + "download_count": 2025022, + "project": "flake8-quotes" + }, + { + "download_count": 2022199, + "project": "python-socketio" + }, + { + "download_count": 2021994, + "project": "slimit" + }, + { + "download_count": 2021337, + "project": "pygeocoder" + }, + { + "download_count": 2020656, + "project": "javaobj-py3" + }, + { + "download_count": 2019345, + "project": "tweepy" + }, + { + "download_count": 2015977, + "project": "grpc-google-logging-v2" + }, + { + 
"download_count": 2013359, + "project": "twofish" + }, + { + "download_count": 2010440, + "project": "urwid" + }, + { + "download_count": 2008501, + "project": "pyathena" + }, + { + "download_count": 2004648, + "project": "azureml-sdk" + }, + { + "download_count": 2002586, + "project": "pdfminer-six" + }, + { + "download_count": 2000934, + "project": "grpc-google-pubsub-v1" + }, + { + "download_count": 1999960, + "project": "astral" + }, + { + "download_count": 1996773, + "project": "python-box" + }, + { + "download_count": 1992382, + "project": "python-openstackclient" + }, + { + "download_count": 1987939, + "project": "toposort" + }, + { + "download_count": 1984547, + "project": "httptools" + }, + { + "download_count": 1980989, + "project": "asynctest" + }, + { + "download_count": 1978811, + "project": "pycalverter" + }, + { + "download_count": 1975990, + "project": "django-mptt" + }, + { + "download_count": 1974600, + "project": "nameparser" + }, + { + "download_count": 1974472, + "project": "geomet" + }, + { + "download_count": 1974084, + "project": "rtree" + }, + { + "download_count": 1970886, + "project": "gax-google-logging-v2" + }, + { + "download_count": 1967604, + "project": "openapi-spec-validator" + }, + { + "download_count": 1966141, + "project": "simpleeval" + }, + { + "download_count": 1965371, + "project": "gax-google-pubsub-v1" + }, + { + "download_count": 1964155, + "project": "pympler" + }, + { + "download_count": 1957946, + "project": "pint" + }, + { + "download_count": 1954321, + "project": "django-celery-results" + }, + { + "download_count": 1950586, + "project": "oauth2" + }, + { + "download_count": 1947313, + "project": "collections-extended" + }, + { + "download_count": 1943588, + "project": "dparse" + }, + { + "download_count": 1937747, + "project": "azure-mgmt-botservice" + }, + { + "download_count": 1935888, + "project": "facebook-business" + }, + { + "download_count": 1932910, + "project": "django-localflavor" + }, + { + 
"download_count": 1931470, + "project": "slackweb" + }, + { + "download_count": 1919103, + "project": "azure-eventhub" + }, + { + "download_count": 1918652, + "project": "django-braces" + }, + { + "download_count": 1917375, + "project": "fake-useragent" + }, + { + "download_count": 1916732, + "project": "python-engineio" + }, + { + "download_count": 1904465, + "project": "django-countries" + }, + { + "download_count": 1901273, + "project": "ptvsd" + }, + { + "download_count": 1899393, + "project": "orderedmultidict" + }, + { + "download_count": 1897121, + "project": "jwcrypto" + }, + { + "download_count": 1895022, + "project": "azure-mgmt-security" + }, + { + "download_count": 1893082, + "project": "awacs" + }, + { + "download_count": 1889385, + "project": "azure-functions-devops-build" + }, + { + "download_count": 1884376, + "project": "locket" + }, + { + "download_count": 1882404, + "project": "ctutlz" + }, + { + "download_count": 1875062, + "project": "snapshottest" + }, + { + "download_count": 1874184, + "project": "pdfkit" + }, + { + "download_count": 1870591, + "project": "scapy" + }, + { + "download_count": 1869037, + "project": "opencensus-context" + }, + { + "download_count": 1862753, + "project": "flask-mail" + }, + { + "download_count": 1860985, + "project": "intervaltree" + }, + { + "download_count": 1856012, + "project": "azure-mgmt-sqlvirtualmachine" + }, + { + "download_count": 1853788, + "project": "azure-mgmt-kusto" + }, + { + "download_count": 1853245, + "project": "luigi" + }, + { + "download_count": 1852083, + "project": "pylru" + }, + { + "download_count": 1848356, + "project": "sklearn-pandas" + }, + { + "download_count": 1846838, + "project": "pydantic" + }, + { + "download_count": 1845633, + "project": "email-validator" + }, + { + "download_count": 1844376, + "project": "pyquery" + }, + { + "download_count": 1841139, + "project": "django-oauth-toolkit" + }, + { + "download_count": 1839835, + "project": "memory-profiler" + }, + { + 
"download_count": 1839825, + "project": "jupyterlab-server" + }, + { + "download_count": 1835726, + "project": "sqlalchemy-migrate" + }, + { + "download_count": 1832053, + "project": "retry-decorator" + }, + { + "download_count": 1830194, + "project": "robotframework-seleniumlibrary" + }, + { + "download_count": 1825914, + "project": "koalas" + }, + { + "download_count": 1822090, + "project": "amazon-dax-client" + }, + { + "download_count": 1821759, + "project": "python-nvd3" + }, + { + "download_count": 1818147, + "project": "utlz" + }, + { + "download_count": 1813328, + "project": "requests-kerberos" + }, + { + "download_count": 1803051, + "project": "ftfy" + }, + { + "download_count": 1798001, + "project": "crypto" + }, + { + "download_count": 1792237, + "project": "distlib" + }, + { + "download_count": 1791068, + "project": "wordsegment" + }, + { + "download_count": 1790178, + "project": "django-taggit" + }, + { + "download_count": 1783750, + "project": "suds" + }, + { + "download_count": 1782898, + "project": "fabric3" + }, + { + "download_count": 1782756, + "project": "socksipy-branch" + }, + { + "download_count": 1778530, + "project": "webcolors" + }, + { + "download_count": 1773769, + "project": "orderedset" + }, + { + "download_count": 1770892, + "project": "mxnet" + }, + { + "download_count": 1767740, + "project": "mixpanel" + }, + { + "download_count": 1766756, + "project": "python-stdnum" + }, + { + "download_count": 1765611, + "project": "polib" + }, + { + "download_count": 1762017, + "project": "pysaml2" + }, + { + "download_count": 1760938, + "project": "pywinpty" + }, + { + "download_count": 1760472, + "project": "curlify" + }, + { + "download_count": 1759532, + "project": "dulwich" + }, + { + "download_count": 1755858, + "project": "tzwhere" + }, + { + "download_count": 1753697, + "project": "pyotp" + }, + { + "download_count": 1752520, + "project": "dropbox" + }, + { + "download_count": 1748789, + "project": "thriftpy" + }, + { + "download_count": 
1744492, + "project": "yattag" + }, + { + "download_count": 1744207, + "project": "xxhash" + }, + { + "download_count": 1740901, + "project": "colorlover" + }, + { + "download_count": 1740812, + "project": "mkdocs" + }, + { + "download_count": 1723311, + "project": "iso3166" + }, + { + "download_count": 1722795, + "project": "gcs-oauth2-boto-plugin" + }, + { + "download_count": 1720946, + "project": "protorpc" + }, + { + "download_count": 1717972, + "project": "sentinels" + }, + { + "download_count": 1716396, + "project": "pykalman" + }, + { + "download_count": 1715123, + "project": "pkgconfig" + }, + { + "download_count": 1714704, + "project": "geohash" + }, + { + "download_count": 1712854, + "project": "google-cloud-dlp" + }, + { + "download_count": 1711556, + "project": "resampy" + }, + { + "download_count": 1705164, + "project": "request" + }, + { + "download_count": 1696070, + "project": "usaddress" + }, + { + "download_count": 1694720, + "project": "superlance" + }, + { + "download_count": 1692010, + "project": "librato-metrics" + }, + { + "download_count": 1690356, + "project": "flask-oauthlib" + }, + { + "download_count": 1686047, + "project": "google-cloud-texttospeech" + }, + { + "download_count": 1677666, + "project": "post" + }, + { + "download_count": 1675876, + "project": "get" + }, + { + "download_count": 1669578, + "project": "daphne" + }, + { + "download_count": 1665895, + "project": "librosa" + }, + { + "download_count": 1665557, + "project": "pyelftools" + }, + { + "download_count": 1665384, + "project": "query-string" + }, + { + "download_count": 1663244, + "project": "pywinrm" + }, + { + "download_count": 1660863, + "project": "pyreadline" + }, + { + "download_count": 1657504, + "project": "ez-setup" + }, + { + "download_count": 1656438, + "project": "channels" + }, + { + "download_count": 1640299, + "project": "node-semver" + }, + { + "download_count": 1638276, + "project": "tensorboardx" + }, + { + "download_count": 1631659, + "project": 
"htmlmin" + }, + { + "download_count": 1625146, + "project": "tensorflow-datasets" + }, + { + "download_count": 1624914, + "project": "audioread" + }, + { + "download_count": 1621703, + "project": "couchdb" + }, + { + "download_count": 1618223, + "project": "google-reauth" + }, + { + "download_count": 1616648, + "project": "google-cloud-redis" + }, + { + "download_count": 1615335, + "project": "autograd" + }, + { + "download_count": 1609038, + "project": "rollbar" + }, + { + "download_count": 1608426, + "project": "pyu2f" + }, + { + "download_count": 1603406, + "project": "iptools" + }, + { + "download_count": 1601716, + "project": "compatibility-lib" + }, + { + "download_count": 1599718, + "project": "google-cloud-asset" + }, + { + "download_count": 1599709, + "project": "azure-mgmt-privatedns" + }, + { + "download_count": 1596670, + "project": "python-decouple" + }, + { + "download_count": 1592734, + "project": "oslo-concurrency" + }, + { + "download_count": 1590149, + "project": "mongomock" + }, + { + "download_count": 1590067, + "project": "fluent-logger" + }, + { + "download_count": 1589332, + "project": "pygrok" + }, + { + "download_count": 1586920, + "project": "rauth" + }, + { + "download_count": 1585024, + "project": "probableparsing" + }, + { + "download_count": 1580625, + "project": "dominate" + }, + { + "download_count": 1577725, + "project": "pykerberos" + }, + { + "download_count": 1577380, + "project": "pyramid" + }, + { + "download_count": 1575279, + "project": "flask-cache" + }, + { + "download_count": 1575048, + "project": "pytest-cache" + }, + { + "download_count": 1574450, + "project": "pyee" + }, + { + "download_count": 1572539, + "project": "bingads" + }, + { + "download_count": 1569151, + "project": "appium-python-client" + }, + { + "download_count": 1567159, + "project": "pygam" + }, + { + "download_count": 1564680, + "project": "fysom" + }, + { + "download_count": 1563117, + "project": "tempita" + }, + { + "download_count": 1561979, + 
"project": "pywin32-ctypes" + }, + { + "download_count": 1561323, + "project": "diskcache" + }, + { + "download_count": 1558407, + "project": "pyhs2" + }, + { + "download_count": 1556417, + "project": "frozendict" + }, + { + "download_count": 1556392, + "project": "immutables" + }, + { + "download_count": 1550611, + "project": "python-neutronclient" + }, + { + "download_count": 1549879, + "project": "gspread-dataframe" + }, + { + "download_count": 1545947, + "project": "pyro4" + }, + { + "download_count": 1539049, + "project": "vertica-python" + }, + { + "download_count": 1538249, + "project": "google-cloud-securitycenter" + }, + { + "download_count": 1532048, + "project": "m3u8" + }, + { + "download_count": 1530674, + "project": "serpent" + }, + { + "download_count": 1527389, + "project": "aiobotocore" + }, + { + "download_count": 1526900, + "project": "django-reversion" + }, + { + "download_count": 1525911, + "project": "tox-travis" + }, + { + "download_count": 1524549, + "project": "pluginbase" + }, + { + "download_count": 1523680, + "project": "google-cloud-iot" + }, + { + "download_count": 1523139, + "project": "pykafka" + }, + { + "download_count": 1522621, + "project": "anyconfig" + }, + { + "download_count": 1520539, + "project": "pyjwkest" + }, + { + "download_count": 1520176, + "project": "django-formtools" + }, + { + "download_count": 1519701, + "project": "vowpalwabbit" + }, + { + "download_count": 1518864, + "project": "gprof2dot" + }, + { + "download_count": 1517841, + "project": "presto-python-client" + }, + { + "download_count": 1515284, + "project": "delorean" + }, + { + "download_count": 1514817, + "project": "json5" + }, + { + "download_count": 1511462, + "project": "num2words" + }, + { + "download_count": 1507178, + "project": "pylibmc" + }, + { + "download_count": 1505966, + "project": "httpagentparser" + }, + { + "download_count": 1504331, + "project": "drf-nested-routers" + }, + { + "download_count": 1504075, + "project": "icalendar" + }, + { 
+ "download_count": 1503765, + "project": "google-cloud-websecurityscanner" + }, + { + "download_count": 1501399, + "project": "lru-dict" + }, + { + "download_count": 1496923, + "project": "cloudant" + }, + { + "download_count": 1493340, + "project": "keyrings-alt" + }, + { + "download_count": 1492739, + "project": "cattrs" + }, + { + "download_count": 1491297, + "project": "model-mommy" + }, + { + "download_count": 1490933, + "project": "jenkinsapi" + }, + { + "download_count": 1488901, + "project": "workalendar" + }, + { + "download_count": 1486683, + "project": "lifetimes" + }, + { + "download_count": 1484449, + "project": "sseclient-py" + }, + { + "download_count": 1481519, + "project": "python-etcd" + }, + { + "download_count": 1480386, + "project": "testinfra" + }, + { + "download_count": 1479219, + "project": "sentencepiece" + }, + { + "download_count": 1479194, + "project": "scikit-optimize" + }, + { + "download_count": 1477712, + "project": "flask-responses" + }, + { + "download_count": 1468207, + "project": "django-polymorphic" + }, + { + "download_count": 1467601, + "project": "azure-mgmt-deploymentmanager" + }, + { + "download_count": 1464092, + "project": "routes" + }, + { + "download_count": 1463152, + "project": "editdistance" + }, + { + "download_count": 1460523, + "project": "bugsnag" + }, + { + "download_count": 1453426, + "project": "conan" + }, + { + "download_count": 1449766, + "project": "autowrapt" + }, + { + "download_count": 1448235, + "project": "fasttext" + }, + { + "download_count": 1445709, + "project": "django-rest-auth" + }, + { + "download_count": 1444092, + "project": "catboost" + }, + { + "download_count": 1442809, + "project": "pydash" + }, + { + "download_count": 1442503, + "project": "libsass" + }, + { + "download_count": 1441996, + "project": "importlib" + }, + { + "download_count": 1440920, + "project": "pytest-flask" + }, + { + "download_count": 1440731, + "project": "django-simple-history" + }, + { + "download_count": 
1439129, + "project": "django-picklefield" + }, + { + "download_count": 1437255, + "project": "trollius" + }, + { + "download_count": 1433413, + "project": "ml-metadata" + }, + { + "download_count": 1428493, + "project": "port-for" + }, + { + "download_count": 1426881, + "project": "flake8-bugbear" + }, + { + "download_count": 1425070, + "project": "python-nmap" + }, + { + "download_count": 1424275, + "project": "newlinejson" + }, + { + "download_count": 1423507, + "project": "pytest-benchmark" + }, + { + "download_count": 1422061, + "project": "hacking" + }, + { + "download_count": 1420833, + "project": "ratelim" + }, + { + "download_count": 1416683, + "project": "rdflib" + }, + { + "download_count": 1415247, + "project": "ninja" + }, + { + "download_count": 1413811, + "project": "geocoder" + }, + { + "download_count": 1413778, + "project": "parsimonious" + }, + { + "download_count": 1409060, + "project": "xmlsec" + }, + { + "download_count": 1407612, + "project": "jsonpath-ng" + }, + { + "download_count": 1404958, + "project": "authy" + }, + { + "download_count": 1399670, + "project": "python3-saml" + }, + { + "download_count": 1399023, + "project": "django-ratelimit" + }, + { + "download_count": 1398229, + "project": "watson-machine-learning-client" + }, + { + "download_count": 1397882, + "project": "motor" + }, + { + "download_count": 1397503, + "project": "pyusb" + }, + { + "download_count": 1393071, + "project": "eli5" + }, + { + "download_count": 1392124, + "project": "facebook-sdk" + }, + { + "download_count": 1391265, + "project": "py-zabbix" + }, + { + "download_count": 1390039, + "project": "threatconnect" + }, + { + "download_count": 1389772, + "project": "github3-py" + }, + { + "download_count": 1384962, + "project": "dash-renderer" + }, + { + "download_count": 1384373, + "project": "pyzipcode3" + }, + { + "download_count": 1384208, + "project": "transaction" + }, + { + "download_count": 1377748, + "project": "dash" + }, + { + "download_count": 
1377392, + "project": "contextvars" + }, + { + "download_count": 1375491, + "project": "pyppeteer" + }, + { + "download_count": 1374745, + "project": "imutils" + }, + { + "download_count": 1373022, + "project": "predicthq" + }, + { + "download_count": 1371449, + "project": "furl" + }, + { + "download_count": 1370079, + "project": "graypy" + }, + { + "download_count": 1368582, + "project": "ipy" + }, + { + "download_count": 1365609, + "project": "apache-libcloud" + }, + { + "download_count": 1363504, + "project": "langid" + }, + { + "download_count": 1362248, + "project": "happybase" + }, + { + "download_count": 1362080, + "project": "wand" + }, + { + "download_count": 1359167, + "project": "dash-core-components" + }, + { + "download_count": 1355835, + "project": "teamcity-messages" + }, + { + "download_count": 1353938, + "project": "django-treebeard" + }, + { + "download_count": 1353094, + "project": "bottleneck" + }, + { + "download_count": 1347193, + "project": "pipdeptree" + }, + { + "download_count": 1346804, + "project": "flask-socketio" + }, + { + "download_count": 1345086, + "project": "feather-format" + }, + { + "download_count": 1345015, + "project": "pyshp" + }, + { + "download_count": 1340081, + "project": "cerberus-python-client" + }, + { + "download_count": 1339531, + "project": "pytest-ordering" + }, + { + "download_count": 1337974, + "project": "dateutils" + }, + { + "download_count": 1337690, + "project": "ccy" + }, + { + "download_count": 1336766, + "project": "ec2-metadata" + }, + { + "download_count": 1336028, + "project": "gevent-websocket" + }, + { + "download_count": 1333439, + "project": "pyenchant" + }, + { + "download_count": 1333043, + "project": "pykwalify" + }, + { + "download_count": 1331164, + "project": "ptable" + }, + { + "download_count": 1324399, + "project": "dash-html-components" + }, + { + "download_count": 1323369, + "project": "wmctrl" + }, + { + "download_count": 1322854, + "project": "markdown2" + }, + { + "download_count": 
1320709, + "project": "fancycompleter" + }, + { + "download_count": 1320502, + "project": "genson" + }, + { + "download_count": 1317756, + "project": "pyhocon" + }, + { + "download_count": 1317236, + "project": "pdbpp" + }, + { + "download_count": 1316522, + "project": "crc16" + }, + { + "download_count": 1310312, + "project": "gnupg" + }, + { + "download_count": 1306934, + "project": "palettable" + }, + { + "download_count": 1306842, + "project": "fake-factory" + }, + { + "download_count": 1302234, + "project": "bson" + }, + { + "download_count": 1293536, + "project": "jsonpath-rw-ext" + }, + { + "download_count": 1291830, + "project": "graphene-django" + }, + { + "download_count": 1288532, + "project": "elasticsearch-curator" + }, + { + "download_count": 1287159, + "project": "agate" + }, + { + "download_count": 1286419, + "project": "pyluach" + }, + { + "download_count": 1276264, + "project": "pytoml" + }, + { + "download_count": 1275859, + "project": "xhtml2pdf" + }, + { + "download_count": 1275165, + "project": "mandrill" + }, + { + "download_count": 1274724, + "project": "aws-sam-cli" + }, + { + "download_count": 1274476, + "project": "aws-lambda-builders" + }, + { + "download_count": 1274226, + "project": "algoliasearch" + }, + { + "download_count": 1273921, + "project": "hupper" + }, + { + "download_count": 1261688, + "project": "testscenarios" + }, + { + "download_count": 1259972, + "project": "cufflinks" + }, + { + "download_count": 1258105, + "project": "signalfx" + }, + { + "download_count": 1257144, + "project": "moviepy" + }, + { + "download_count": 1255798, + "project": "objgraph" + }, + { + "download_count": 1252062, + "project": "chevron" + }, + { + "download_count": 1235194, + "project": "pdf2image" + }, + { + "download_count": 1234160, + "project": "uvicorn" + }, + { + "download_count": 1233486, + "project": "tlslite" + }, + { + "download_count": 1231831, + "project": "pybase64" + }, + { + "download_count": 1230654, + "project": "createsend" + }, 
+ { + "download_count": 1230170, + "project": "gql" + }, + { + "download_count": 1230039, + "project": "imagehash" + }, + { + "download_count": 1228048, + "project": "azureml-defaults" + }, + { + "download_count": 1227477, + "project": "azure-mgmt-imagebuilder" + }, + { + "download_count": 1226165, + "project": "serverlessrepo" + }, + { + "download_count": 1221206, + "project": "pytest-watch" + }, + { + "download_count": 1220741, + "project": "google-cloud-bigquery-storage" + }, + { + "download_count": 1218278, + "project": "django-ses" + }, + { + "download_count": 1217113, + "project": "luminol" + }, + { + "download_count": 1213653, + "project": "pyaes" + }, + { + "download_count": 1213392, + "project": "flask-mongoalchemy" + }, + { + "download_count": 1212483, + "project": "flake8-print" + }, + { + "download_count": 1208573, + "project": "resource" + }, + { + "download_count": 1207795, + "project": "stemming" + }, + { + "download_count": 1206452, + "project": "python-easyconfig" + }, + { + "download_count": 1206109, + "project": "jsonform" + }, + { + "download_count": 1205968, + "project": "jsonsir" + }, + { + "download_count": 1202856, + "project": "logbook" + }, + { + "download_count": 1198077, + "project": "import-from-github-com" + }, + { + "download_count": 1195471, + "project": "mss" + }, + { + "download_count": 1195405, + "project": "robotframework-requests" + }, + { + "download_count": 1194828, + "project": "nose2" + }, + { + "download_count": 1194314, + "project": "fusepy" + }, + { + "download_count": 1193288, + "project": "cmake" + }, + { + "download_count": 1192641, + "project": "httpbin" + }, + { + "download_count": 1190084, + "project": "graphql-server-core" + }, + { + "download_count": 1189375, + "project": "stestr" + }, + { + "download_count": 1188229, + "project": "recordclass" + }, + { + "download_count": 1186101, + "project": "django-bootstrap4" + }, + { + "download_count": 1181472, + "project": "tree-format" + }, + { + "download_count": 
1180564, + "project": "django-guardian" + }, + { + "download_count": 1180286, + "project": "django-celery" + }, + { + "download_count": 1179046, + "project": "publicsuffix" + }, + { + "download_count": 1178235, + "project": "astropy" + }, + { + "download_count": 1177835, + "project": "konlpy" + }, + { + "download_count": 1174516, + "project": "threadloop" + }, + { + "download_count": 1174367, + "project": "radon" + }, + { + "download_count": 1172767, + "project": "azure-cli-profile" + }, + { + "download_count": 1172663, + "project": "jieba" + }, + { + "download_count": 1172300, + "project": "pyfakefs" + }, + { + "download_count": 1172278, + "project": "namedlist" + }, + { + "download_count": 1171988, + "project": "pubnub" + }, + { + "download_count": 1170778, + "project": "flasgger" + }, + { + "download_count": 1168270, + "project": "pymeeus" + }, + { + "download_count": 1164230, + "project": "transitions" + }, + { + "download_count": 1163775, + "project": "visitor" + }, + { + "download_count": 1161777, + "project": "django-redis-cache" + }, + { + "download_count": 1161264, + "project": "lmdb" + }, + { + "download_count": 1160572, + "project": "json-logging-py" + }, + { + "download_count": 1159436, + "project": "protobuf3-to-dict" + }, + { + "download_count": 1153262, + "project": "patch" + }, + { + "download_count": 1152875, + "project": "horovod" + }, + { + "download_count": 1152461, + "project": "pyzabbix" + }, + { + "download_count": 1148339, + "project": "tailer" + }, + { + "download_count": 1146680, + "project": "azure-cli-resource" + }, + { + "download_count": 1145300, + "project": "etcd3" + }, + { + "download_count": 1143148, + "project": "azure-cli-iot" + }, + { + "download_count": 1143069, + "project": "djangorestframework-xml" + }, + { + "download_count": 1139676, + "project": "logutils" + }, + { + "download_count": 1138222, + "project": "javaproperties" + }, + { + "download_count": 1137231, + "project": "azure-cli-extension" + }, + { + "download_count": 
1137033, + "project": "python-telegram-bot" + }, + { + "download_count": 1135140, + "project": "platformio" + }, + { + "download_count": 1134846, + "project": "xvfbwrapper" + }, + { + "download_count": 1133241, + "project": "pytest-pythonpath" + }, + { + "download_count": 1129508, + "project": "google-cloud-iam" + }, + { + "download_count": 1129177, + "project": "pydrive" + }, + { + "download_count": 1128895, + "project": "minio" + }, + { + "download_count": 1128310, + "project": "python-heatclient" + }, + { + "download_count": 1127447, + "project": "azure-cli-dls" + }, + { + "download_count": 1127383, + "project": "demjson" + }, + { + "download_count": 1126928, + "project": "pygal" + }, + { + "download_count": 1123556, + "project": "azure-cli-role" + }, + { + "download_count": 1123087, + "project": "azure-cli-monitor" + }, + { + "download_count": 1121560, + "project": "azure-cli-storage" + }, + { + "download_count": 1121500, + "project": "azure-cli-sql" + }, + { + "download_count": 1121354, + "project": "azure-cli-keyvault" + }, + { + "download_count": 1121021, + "project": "azure-cli-network" + }, + { + "download_count": 1120955, + "project": "azure-cli-interactive" + }, + { + "download_count": 1120732, + "project": "azure-cli-container" + }, + { + "download_count": 1120661, + "project": "azure-cli-appservice" + }, + { + "download_count": 1120619, + "project": "azure-cli-lab" + }, + { + "download_count": 1120596, + "project": "pydub" + }, + { + "download_count": 1120448, + "project": "azure-cli-acr" + }, + { + "download_count": 1120440, + "project": "pem" + }, + { + "download_count": 1119943, + "project": "azure-cli-acs" + }, + { + "download_count": 1119731, + "project": "azure-cli-cognitiveservices" + }, + { + "download_count": 1118667, + "project": "azure-cli-batch" + }, + { + "download_count": 1118554, + "project": "azure-cli-rdbms" + }, + { + "download_count": 1118179, + "project": "dumbyaml" + }, + { + "download_count": 1118164, + "project": 
"azure-cli-cosmosdb" + }, + { + "download_count": 1117990, + "project": "azure-cli-dla" + }, + { + "download_count": 1117671, + "project": "azure-cli-vm" + }, + { + "download_count": 1117663, + "project": "graphite-web" + }, + { + "download_count": 1117633, + "project": "easy-thumbnails" + }, + { + "download_count": 1117629, + "project": "ggplot" + }, + { + "download_count": 1117326, + "project": "ncclient" + }, + { + "download_count": 1115734, + "project": "azure-cli-cdn" + }, + { + "download_count": 1115095, + "project": "ipyparallel" + }, + { + "download_count": 1114052, + "project": "uritemplate-py" + }, + { + "download_count": 1113849, + "project": "azure-cli-servicefabric" + }, + { + "download_count": 1112830, + "project": "azure-cli-batchai" + }, + { + "download_count": 1112111, + "project": "colander" + }, + { + "download_count": 1112004, + "project": "libhoney" + }, + { + "download_count": 1111031, + "project": "robotframework-selenium2library" + }, + { + "download_count": 1110924, + "project": "azure-cli-reservations" + }, + { + "download_count": 1110554, + "project": "selectors34" + }, + { + "download_count": 1109781, + "project": "python-redis-lock" + }, + { + "download_count": 1109474, + "project": "django-waffle" + }, + { + "download_count": 1109341, + "project": "construct" + }, + { + "download_count": 1107612, + "project": "pyhcl" + }, + { + "download_count": 1107023, + "project": "allure-python-commons" + }, + { + "download_count": 1106855, + "project": "opencv-python-headless" + }, + { + "download_count": 1104732, + "project": "nibabel" + }, + { + "download_count": 1104394, + "project": "ntplib" + }, + { + "download_count": 1101855, + "project": "gsutil" + }, + { + "download_count": 1099271, + "project": "python-redis" + }, + { + "download_count": 1099171, + "project": "honeycomb-beeline" + }, + { + "download_count": 1095266, + "project": "google-cloud-profiler" + }, + { + "download_count": 1094548, + "project": "djangorestframework-csv" + }, + { 
+ "download_count": 1093507, + "project": "imageio-ffmpeg" + }, + { + "download_count": 1093006, + "project": "rpyc" + }, + { + "download_count": 1092127, + "project": "databricks-api" + }, + { + "download_count": 1091012, + "project": "django-otp" + }, + { + "download_count": 1089786, + "project": "atlassian-jwt-auth" + }, + { + "download_count": 1089668, + "project": "pyscreeze" + }, + { + "download_count": 1088119, + "project": "jsonlines" + }, + { + "download_count": 1087785, + "project": "google-cloud-scheduler" + }, + { + "download_count": 1086837, + "project": "py-moneyed" + }, + { + "download_count": 1086168, + "project": "prospector" + }, + { + "download_count": 1084845, + "project": "pyfcm" + }, + { + "download_count": 1084588, + "project": "leather" + }, + { + "download_count": 1083842, + "project": "flask-session" + }, + { + "download_count": 1083772, + "project": "flask-principal" + }, + { + "download_count": 1081797, + "project": "azure-mgmt-managedservices" + }, + { + "download_count": 1080061, + "project": "zope-sqlalchemy" + }, + { + "download_count": 1079118, + "project": "wikipedia" + }, + { + "download_count": 1078680, + "project": "pyopengl" + }, + { + "download_count": 1077281, + "project": "django-anymail" + }, + { + "download_count": 1075981, + "project": "cov-core" + }, + { + "download_count": 1075897, + "project": "azure-mgmt-netapp" + }, + { + "download_count": 1074798, + "project": "pytest-flake8" + }, + { + "download_count": 1071887, + "project": "requests-cache" + }, + { + "download_count": 1071617, + "project": "plaster-pastedeploy" + }, + { + "download_count": 1071057, + "project": "boxsdk" + }, + { + "download_count": 1070181, + "project": "numpydoc" + }, + { + "download_count": 1069130, + "project": "dodgy" + }, + { + "download_count": 1067802, + "project": "sphinxcontrib-httpdomain" + }, + { + "download_count": 1067667, + "project": "git-url-parse" + }, + { + "download_count": 1065839, + "project": "restructuredtext-lint" + }, + { 
+ "download_count": 1063327, + "project": "django-storages-redux" + }, + { + "download_count": 1061635, + "project": "h2o-pysparkling-2-4" + }, + { + "download_count": 1060942, + "project": "flatbuffers" + }, + { + "download_count": 1059650, + "project": "webassets" + }, + { + "download_count": 1057175, + "project": "gdata" + }, + { + "download_count": 1055836, + "project": "pytest-pep8" + }, + { + "download_count": 1054787, + "project": "setoptconf" + }, + { + "download_count": 1053777, + "project": "flask-graphql" + }, + { + "download_count": 1051978, + "project": "lark-parser" + }, + { + "download_count": 1046552, + "project": "google-cloud-datacatalog" + }, + { + "download_count": 1045356, + "project": "requirements-detector" + }, + { + "download_count": 1043870, + "project": "google-cloud-talent" + }, + { + "download_count": 1043546, + "project": "utils" + }, + { + "download_count": 1043075, + "project": "google-cloud-datalabeling" + }, + { + "download_count": 1042791, + "project": "django-mailgun" + }, + { + "download_count": 1041833, + "project": "google-cloud-os-login" + }, + { + "download_count": 1040789, + "project": "plaster" + }, + { + "download_count": 1040645, + "project": "google-cloud-webrisk" + }, + { + "download_count": 1040329, + "project": "beaker" + }, + { + "download_count": 1039677, + "project": "django-fsm" + }, + { + "download_count": 1039618, + "project": "grpcio-health-checking" + }, + { + "download_count": 1039569, + "project": "flask-apispec" + }, + { + "download_count": 1037586, + "project": "flake8-comprehensions" + }, + { + "download_count": 1036471, + "project": "pylint-flask" + }, + { + "download_count": 1036185, + "project": "pygerduty" + }, + { + "download_count": 1036096, + "project": "pudb" + }, + { + "download_count": 1036044, + "project": "biopython" + }, + { + "download_count": 1035148, + "project": "brewer2mpl" + }, + { + "download_count": 1034346, + "project": "rpy2" + }, + { + "download_count": 1033958, + "project": 
"dash-table" + }, + { + "download_count": 1033827, + "project": "base58" + }, + { + "download_count": 1033818, + "project": "proto-google-cloud-pubsub-v1" + }, + { + "download_count": 1033419, + "project": "maxminddb-geolite2" + }, + { + "download_count": 1032216, + "project": "bravado-core" + }, + { + "download_count": 1031978, + "project": "starlette" + }, + { + "download_count": 1031797, + "project": "cftime" + }, + { + "download_count": 1030527, + "project": "papermill" + }, + { + "download_count": 1030356, + "project": "pytest-aiohttp" + }, + { + "download_count": 1028784, + "project": "neotime" + }, + { + "download_count": 1028024, + "project": "django-grappelli" + }, + { + "download_count": 1026556, + "project": "csvkit" + }, + { + "download_count": 1026453, + "project": "azure-mgmt-appconfiguration" + }, + { + "download_count": 1025532, + "project": "mando" + }, + { + "download_count": 1025061, + "project": "python-pptx" + }, + { + "download_count": 1024849, + "project": "futurist" + }, + { + "download_count": 1024564, + "project": "tfx" + }, + { + "download_count": 1023148, + "project": "shyaml" + }, + { + "download_count": 1020560, + "project": "whoosh" + }, + { + "download_count": 1019249, + "project": "netcdf4" + }, + { + "download_count": 1018441, + "project": "braintree" + }, + { + "download_count": 1017498, + "project": "pylint-celery" + }, + { + "download_count": 1015935, + "project": "pyautogui" + }, + { + "download_count": 1015329, + "project": "uritools" + }, + { + "download_count": 1014941, + "project": "openshift" + }, + { + "download_count": 1014682, + "project": "jinjasql" + }, + { + "download_count": 1011470, + "project": "bunch" + }, + { + "download_count": 1011345, + "project": "tribool" + }, + { + "download_count": 1010041, + "project": "shade" + }, + { + "download_count": 1009923, + "project": "geoalchemy2" + }, + { + "download_count": 1007914, + "project": "stups-tokens" + }, + { + "download_count": 1007728, + "project": 
"django-health-check" + }, + { + "download_count": 1006511, + "project": "ansiwrap" + }, + { + "download_count": 1005973, + "project": "djangorestframework-simplejwt" + }, + { + "download_count": 1004447, + "project": "repoze-who" + }, + { + "download_count": 1003341, + "project": "u-msgpack-python" + }, + { + "download_count": 1002884, + "project": "psycogreen" + }, + { + "download_count": 1002180, + "project": "pyroute2" + }, + { + "download_count": 997107, + "project": "impyla" + }, + { + "download_count": 997057, + "project": "functools" + }, + { + "download_count": 995470, + "project": "rq-scheduler" + }, + { + "download_count": 995174, + "project": "xarray" + }, + { + "download_count": 995018, + "project": "dictionaries" + }, + { + "download_count": 995017, + "project": "django-haystack" + }, + { + "download_count": 992160, + "project": "check-manifest" + }, + { + "download_count": 990507, + "project": "python-rapidjson" + }, + { + "download_count": 989611, + "project": "py-vapid" + }, + { + "download_count": 989525, + "project": "textwrap3" + }, + { + "download_count": 988451, + "project": "soundfile" + }, + { + "download_count": 987924, + "project": "python-string-utils" + }, + { + "download_count": 987136, + "project": "pywinauto" + }, + { + "download_count": 985267, + "project": "oslo-db" + }, + { + "download_count": 984514, + "project": "xmlrunner" + }, + { + "download_count": 983293, + "project": "pymdown-extensions" + }, + { + "download_count": 982272, + "project": "sphinx-autobuild" + }, + { + "download_count": 981717, + "project": "django-ckeditor" + }, + { + "download_count": 979521, + "project": "sorl-thumbnail" + }, + { + "download_count": 979220, + "project": "pysmb" + }, + { + "download_count": 978290, + "project": "pymsgbox" + }, + { + "download_count": 977363, + "project": "gapic-google-cloud-pubsub-v1" + }, + { + "download_count": 977316, + "project": "flake8-isort" + }, + { + "download_count": 976939, + "project": "tensorflow-probability" + 
}, + { + "download_count": 976069, + "project": "oslo-messaging" + }, + { + "download_count": 975772, + "project": "python-coveralls" + }, + { + "download_count": 975418, + "project": "flex" + }, + { + "download_count": 973597, + "project": "seleniumbase" + }, + { + "download_count": 972851, + "project": "flake8-commas" + }, + { + "download_count": 972025, + "project": "dirq" + }, + { + "download_count": 971725, + "project": "glfw" + }, + { + "download_count": 968128, + "project": "trains" + }, + { + "download_count": 967325, + "project": "hjson" + }, + { + "download_count": 966886, + "project": "fs" + }, + { + "download_count": 965395, + "project": "pyahocorasick" + }, + { + "download_count": 965068, + "project": "pytest-repeat" + }, + { + "download_count": 964628, + "project": "swagger-ui-bundle" + }, + { + "download_count": 964597, + "project": "typing-inspect" + }, + { + "download_count": 964448, + "project": "sagemaker" + }, + { + "download_count": 964057, + "project": "vobject" + }, + { + "download_count": 963489, + "project": "dbfread" + }, + { + "download_count": 962456, + "project": "bidict" + }, + { + "download_count": 960677, + "project": "google-python-cloud-debugger" + }, + { + "download_count": 958036, + "project": "cognite-sdk" + }, + { + "download_count": 957690, + "project": "vulture" + }, + { + "download_count": 957559, + "project": "pytweening" + }, + { + "download_count": 954913, + "project": "circleci" + }, + { + "download_count": 954734, + "project": "onnxmltools" + }, + { + "download_count": 953896, + "project": "django-jsonfield" + }, + { + "download_count": 952673, + "project": "skl2onnx" + }, + { + "download_count": 951906, + "project": "azure-cli-configure" + }, + { + "download_count": 951530, + "project": "readerwriterlock" + }, + { + "download_count": 951124, + "project": "django-silk" + }, + { + "download_count": 948790, + "project": "json-log-formatter" + }, + { + "download_count": 948696, + "project": "stups-zign" + }, + { + 
"download_count": 948084, + "project": "commentjson" + }, + { + "download_count": 947759, + "project": "opentracing-instrumentation" + }, + { + "download_count": 947140, + "project": "hurry-filesize" + }, + { + "download_count": 946596, + "project": "httpie" + }, + { + "download_count": 945434, + "project": "comtypes" + }, + { + "download_count": 944648, + "project": "azure-cli-cloud" + }, + { + "download_count": 942122, + "project": "stups-cli-support" + }, + { + "download_count": 941812, + "project": "textfsm" + }, + { + "download_count": 941227, + "project": "django-bulk-update" + }, + { + "download_count": 940485, + "project": "pydotplus" + }, + { + "download_count": 939994, + "project": "logilab-common" + }, + { + "download_count": 939219, + "project": "thriftpy2" + }, + { + "download_count": 937977, + "project": "pyldap" + }, + { + "download_count": 937103, + "project": "progressbar" + }, + { + "download_count": 936822, + "project": "limits" + }, + { + "download_count": 935302, + "project": "empy" + }, + { + "download_count": 933336, + "project": "interval" + }, + { + "download_count": 933102, + "project": "twitter-common-lang" + }, + { + "download_count": 932594, + "project": "sanic" + }, + { + "download_count": 932344, + "project": "twitter-common-dirutil" + }, + { + "download_count": 931618, + "project": "uhashring" + }, + { + "download_count": 929734, + "project": "asana" + }, + { + "download_count": 926851, + "project": "base64io" + }, + { + "download_count": 925789, + "project": "django-user-agents" + }, + { + "download_count": 924447, + "project": "reno" + }, + { + "download_count": 923715, + "project": "netmiko" + }, + { + "download_count": 923299, + "project": "twitter-common-options" + }, + { + "download_count": 923153, + "project": "twitter-common-log" + }, + { + "download_count": 923141, + "project": "parsley" + }, + { + "download_count": 921602, + "project": "azure-cli-find" + }, + { + "download_count": 920951, + "project": "azure-cli-redis" + }, 
+ { + "download_count": 920654, + "project": "aws-encryption-sdk-cli" + }, + { + "download_count": 920109, + "project": "stop-words" + }, + { + "download_count": 919963, + "project": "azure-cli-consumption" + }, + { + "download_count": 919735, + "project": "pydevd" + }, + { + "download_count": 919608, + "project": "azure-cli-billing" + }, + { + "download_count": 919364, + "project": "azure-cli-feedback" + }, + { + "download_count": 919204, + "project": "click-log" + }, + { + "download_count": 916168, + "project": "pypd" + }, + { + "download_count": 914683, + "project": "azure-cli-advisor" + }, + { + "download_count": 914682, + "project": "neobolt" + }, + { + "download_count": 911537, + "project": "azure-cli-eventgrid" + }, + { + "download_count": 911471, + "project": "annoy" + }, + { + "download_count": 910544, + "project": "scramp" + }, + { + "download_count": 910046, + "project": "azure-cli-backup" + }, + { + "download_count": 908651, + "project": "flask-assets" + }, + { + "download_count": 908244, + "project": "oslo-service" + }, + { + "download_count": 905587, + "project": "flask-bootstrap" + }, + { + "download_count": 903282, + "project": "proglog" + }, + { + "download_count": 903200, + "project": "keras2onnx" + }, + { + "download_count": 902334, + "project": "plyvel" + }, + { + "download_count": 900779, + "project": "pybluez" + }, + { + "download_count": 899502, + "project": "pyudev" + }, + { + "download_count": 899012, + "project": "testrepository" + }, + { + "download_count": 898793, + "project": "oslo-policy" + }, + { + "download_count": 897914, + "project": "pmdarima" + }, + { + "download_count": 897653, + "project": "django-autocomplete-light" + }, + { + "download_count": 895791, + "project": "artifactory" + }, + { + "download_count": 895766, + "project": "pytest-variables" + }, + { + "download_count": 895437, + "project": "azure-cli-eventhubs" + }, + { + "download_count": 895142, + "project": "twitter-common-collections" + }, + { + "download_count": 
894979, + "project": "azure-cli-servicebus" + }, + { + "download_count": 894815, + "project": "testresources" + }, + { + "download_count": 894191, + "project": "pybs" + }, + { + "download_count": 893842, + "project": "azure-cli-dms" + }, + { + "download_count": 893592, + "project": "channels-redis" + }, + { + "download_count": 893412, + "project": "junitparser" + }, + { + "download_count": 891540, + "project": "tifffile" + }, + { + "download_count": 891533, + "project": "easydict" + }, + { + "download_count": 891481, + "project": "json2parquet" + }, + { + "download_count": 891341, + "project": "pyicu" + }, + { + "download_count": 888690, + "project": "azure-cli-ams" + }, + { + "download_count": 886402, + "project": "pyeapi" + }, + { + "download_count": 885171, + "project": "python-gilt" + }, + { + "download_count": 884033, + "project": "azure-cli-search" + }, + { + "download_count": 882989, + "project": "jupyter-nbextensions-configurator" + }, + { + "download_count": 881790, + "project": "monthdelta" + }, + { + "download_count": 880765, + "project": "pynput" + }, + { + "download_count": 880406, + "project": "pyfiglet" + }, + { + "download_count": 878563, + "project": "jsonnet" + }, + { + "download_count": 874987, + "project": "pvlib" + }, + { + "download_count": 874000, + "project": "jupyter-contrib-core" + }, + { + "download_count": 872790, + "project": "mockito" + }, + { + "download_count": 872554, + "project": "nosexcover" + }, + { + "download_count": 872485, + "project": "peakutils" + }, + { + "download_count": 872331, + "project": "rednose" + }, + { + "download_count": 872127, + "project": "ansicolors" + }, + { + "download_count": 871498, + "project": "j2cli" + }, + { + "download_count": 868629, + "project": "awsiotpythonsdk" + }, + { + "download_count": 867297, + "project": "pywfm" + }, + { + "download_count": 866741, + "project": "lml" + }, + { + "download_count": 865346, + "project": "imblearn" + }, + { + "download_count": 863870, + "project": 
"openstackdocstheme" + }, + { + "download_count": 863120, + "project": "jupyter-contrib-nbextensions" + }, + { + "download_count": 860421, + "project": "molecule" + }, + { + "download_count": 858716, + "project": "zstandard" + }, + { + "download_count": 858408, + "project": "pyqrcode" + }, + { + "download_count": 856466, + "project": "line-profiler" + }, + { + "download_count": 856334, + "project": "flask-api" + }, + { + "download_count": 856299, + "project": "honcho" + }, + { + "download_count": 856226, + "project": "jplephem" + }, + { + "download_count": 855767, + "project": "rpqueue" + }, + { + "download_count": 854839, + "project": "autoflake" + }, + { + "download_count": 854260, + "project": "azure-mgmt-apimanagement" + }, + { + "download_count": 854182, + "project": "cognite-model-hosting" + }, + { + "download_count": 852933, + "project": "pytest-dependency" + }, + { + "download_count": 852580, + "project": "pytest-pylint" + }, + { + "download_count": 852418, + "project": "deepmerge" + }, + { + "download_count": 850683, + "project": "jupyter-latex-envs" + }, + { + "download_count": 849484, + "project": "polyline" + }, + { + "download_count": 849092, + "project": "yappi" + }, + { + "download_count": 849002, + "project": "logmatic-python" + }, + { + "download_count": 848508, + "project": "sgp4" + }, + { + "download_count": 848205, + "project": "onnxconverter-common" + }, + { + "download_count": 847724, + "project": "django-pipeline" + }, + { + "download_count": 847508, + "project": "envs" + }, + { + "download_count": 847487, + "project": "jupyter-highlight-selected-word" + }, + { + "download_count": 846088, + "project": "googletrans" + }, + { + "download_count": 845652, + "project": "mkdocs-material" + }, + { + "download_count": 845331, + "project": "django-bootstrap3" + }, + { + "download_count": 843583, + "project": "isoweek" + }, + { + "download_count": 843510, + "project": "image" + }, + { + "download_count": 842232, + "project": "solartime" + }, + { + 
"download_count": 841714, + "project": "flask-debugtoolbar" + }, + { + "download_count": 840214, + "project": "rasterio" + }, + { + "download_count": 839139, + "project": "diamond" + }, + { + "download_count": 837673, + "project": "mailchimp3" + }, + { + "download_count": 835610, + "project": "oslo-middleware" + }, + { + "download_count": 835257, + "project": "mutagen" + }, + { + "download_count": 834695, + "project": "catalogue" + }, + { + "download_count": 834133, + "project": "faulthandler" + }, + { + "download_count": 832671, + "project": "sacrebleu" + }, + { + "download_count": 832545, + "project": "python-jose-cryptodome" + }, + { + "download_count": 831517, + "project": "zeroconf" + }, + { + "download_count": 830534, + "project": "jinja2-pluralize" + }, + { + "download_count": 829948, + "project": "suds-py3" + }, + { + "download_count": 829228, + "project": "pandasql" + }, + { + "download_count": 828892, + "project": "logstash-formatter" + }, + { + "download_count": 828549, + "project": "lifelines" + }, + { + "download_count": 827727, + "project": "liac-arff" + }, + { + "download_count": 827554, + "project": "diff-cover" + }, + { + "download_count": 826205, + "project": "elastic-apm" + }, + { + "download_count": 826135, + "project": "django-coverage-plugin" + }, + { + "download_count": 825300, + "project": "skyfield" + }, + { + "download_count": 824924, + "project": "drf-extensions" + }, + { + "download_count": 823613, + "project": "databricks-pypi-extras" + }, + { + "download_count": 823180, + "project": "azure-cli-relay" + }, + { + "download_count": 822954, + "project": "azure-cli-iotcentral" + }, + { + "download_count": 822898, + "project": "azure-cli-hdinsight" + }, + { + "download_count": 822664, + "project": "azure-cli-maps" + }, + { + "download_count": 822562, + "project": "azure-cli-botservice" + }, + { + "download_count": 822180, + "project": "azure-cli-signalr" + }, + { + "download_count": 822129, + "project": "lime" + }, + { + "download_count": 
821534, + "project": "transifex-client" + }, + { + "download_count": 820293, + "project": "azure-cli-policyinsights" + }, + { + "download_count": 819714, + "project": "django-classy-tags" + }, + { + "download_count": 818561, + "project": "clickhouse-driver" + }, + { + "download_count": 815459, + "project": "scrapy-splash" + }, + { + "download_count": 815166, + "project": "pybrake" + }, + { + "download_count": 814136, + "project": "carbon" + }, + { + "download_count": 813628, + "project": "wmi" + }, + { + "download_count": 810452, + "project": "python-ironicclient" + }, + { + "download_count": 808082, + "project": "pusher" + }, + { + "download_count": 806951, + "project": "datadiff" + }, + { + "download_count": 806876, + "project": "js2py" + }, + { + "download_count": 805430, + "project": "urlobject" + }, + { + "download_count": 804845, + "project": "tinydb" + }, + { + "download_count": 804621, + "project": "pytest-randomly" + }, + { + "download_count": 804371, + "project": "placebo" + }, + { + "download_count": 804270, + "project": "progress" + }, + { + "download_count": 804201, + "project": "nimbusml" + }, + { + "download_count": 803677, + "project": "ffmpeg-python" + }, + { + "download_count": 803390, + "project": "pandas-profiling" + }, + { + "download_count": 803033, + "project": "pyspark-flame" + }, + { + "download_count": 802518, + "project": "nose-xunitmp" + }, + { + "download_count": 801270, + "project": "ftputil" + }, + { + "download_count": 800466, + "project": "pyexcel-io" + }, + { + "download_count": 800452, + "project": "pysam" + }, + { + "download_count": 800033, + "project": "oslo-cache" + }, + { + "download_count": 799400, + "project": "jinja2schema" + }, + { + "download_count": 797811, + "project": "skyfield-data" + }, + { + "download_count": 797080, + "project": "bashate" + }, + { + "download_count": 796778, + "project": "pytest-base-url" + }, + { + "download_count": 795722, + "project": "mpld3" + }, + { + "download_count": 795138, + "project": 
"pytest-selenium" + }, + { + "download_count": 794945, + "project": "facebookads" + }, + { + "download_count": 792726, + "project": "testing-common-database" + }, + { + "download_count": 792699, + "project": "requests-unixsocket" + }, + { + "download_count": 791454, + "project": "ansible-tower-cli" + }, + { + "download_count": 790178, + "project": "dlib" + }, + { + "download_count": 788016, + "project": "web3" + }, + { + "download_count": 787379, + "project": "pygresql" + }, + { + "download_count": 786501, + "project": "update-checker" + }, + { + "download_count": 784385, + "project": "pygetwindow" + }, + { + "download_count": 783264, + "project": "allure-pytest" + }, + { + "download_count": 782719, + "project": "pycontracts" + }, + { + "download_count": 782492, + "project": "wsgi-request-logger" + }, + { + "download_count": 780141, + "project": "m2crypto" + }, + { + "download_count": 779854, + "project": "scrapyd" + }, + { + "download_count": 779681, + "project": "centrosome" + }, + { + "download_count": 779517, + "project": "flask-mongoengine" + }, + { + "download_count": 778027, + "project": "dataclasses-json" + }, + { + "download_count": 777762, + "project": "splinter" + }, + { + "download_count": 777345, + "project": "htmlparser" + }, + { + "download_count": 775376, + "project": "loguru" + }, + { + "download_count": 774793, + "project": "dumb-init" + }, + { + "download_count": 774504, + "project": "python-designateclient" + }, + { + "download_count": 774495, + "project": "speaklater" + }, + { + "download_count": 773679, + "project": "eth-utils" + }, + { + "download_count": 772719, + "project": "spark-df-profiling" + }, + { + "download_count": 772355, + "project": "javabridge" + }, + { + "download_count": 771179, + "project": "us" + }, + { + "download_count": 769552, + "project": "xdg" + }, + { + "download_count": 769306, + "project": "librabbitmq" + }, + { + "download_count": 769240, + "project": "lepl" + }, + { + "download_count": 769163, + "project": 
"pysolr" + }, + { + "download_count": 768526, + "project": "google-cloud-happybase" + }, + { + "download_count": 768426, + "project": "graphene-sqlalchemy" + }, + { + "download_count": 768057, + "project": "google-endpoints-api-management" + }, + { + "download_count": 767991, + "project": "affine" + }, + { + "download_count": 767570, + "project": "colour" + }, + { + "download_count": 764562, + "project": "django-constance" + }, + { + "download_count": 762359, + "project": "infinity" + }, + { + "download_count": 761920, + "project": "djangorestframework-filters" + }, + { + "download_count": 760164, + "project": "robotremoteserver" + }, + { + "download_count": 759992, + "project": "keystonemiddleware" + }, + { + "download_count": 758677, + "project": "distribute" + }, + { + "download_count": 757044, + "project": "hyper" + }, + { + "download_count": 755707, + "project": "pyscreenshot" + }, + { + "download_count": 755554, + "project": "google-endpoints" + }, + { + "download_count": 754592, + "project": "intervals" + }, + { + "download_count": 754564, + "project": "pysal" + }, + { + "download_count": 754317, + "project": "svgwrite" + }, + { + "download_count": 753732, + "project": "cognite-logger" + }, + { + "download_count": 753586, + "project": "pytest-spark" + }, + { + "download_count": 753503, + "project": "nose-parallel" + }, + { + "download_count": 753048, + "project": "dynaconf" + }, + { + "download_count": 752651, + "project": "mahotas" + }, + { + "download_count": 751112, + "project": "databricks-pypi" + }, + { + "download_count": 749141, + "project": "mysql" + }, + { + "download_count": 749102, + "project": "flake8-builtins" + }, + { + "download_count": 748778, + "project": "humpty" + }, + { + "download_count": 748490, + "project": "pyspark-dist-explore" + }, + { + "download_count": 746836, + "project": "django-annoying" + }, + { + "download_count": 746781, + "project": "tinyrpc" + }, + { + "download_count": 746415, + "project": "wincertstore" + }, + { + 
"download_count": 745591, + "project": "django-axes" + }, + { + "download_count": 742692, + "project": "aerospike" + }, + { + "download_count": 739560, + "project": "pycadf" + }, + { + "download_count": 739333, + "project": "django-csp" + }, + { + "download_count": 737212, + "project": "django-compat" + }, + { + "download_count": 735567, + "project": "azure-cli-security" + }, + { + "download_count": 735347, + "project": "asyncssh" + }, + { + "download_count": 734370, + "project": "robotframework-sshlibrary" + }, + { + "download_count": 734265, + "project": "concurrentloghandler" + }, + { + "download_count": 734033, + "project": "django-object-actions" + }, + { + "download_count": 733362, + "project": "azure-cli-kusto" + }, + { + "download_count": 733347, + "project": "tensorflowonspark" + }, + { + "download_count": 732849, + "project": "aioresponses" + }, + { + "download_count": 731576, + "project": "jenkins-job-builder" + }, + { + "download_count": 731088, + "project": "bravado" + }, + { + "download_count": 728665, + "project": "prometheus-flask-exporter" + }, + { + "download_count": 727540, + "project": "pprint" + }, + { + "download_count": 726931, + "project": "jaeger-client" + }, + { + "download_count": 726893, + "project": "nose-parameterized" + }, + { + "download_count": 726613, + "project": "pyrect" + }, + { + "download_count": 726590, + "project": "htcondor" + }, + { + "download_count": 723307, + "project": "pip-licenses" + }, + { + "download_count": 723172, + "project": "mlxtend" + }, + { + "download_count": 721353, + "project": "py2-ipaddress" + }, + { + "download_count": 719973, + "project": "osprofiler" + }, + { + "download_count": 719532, + "project": "pandas-datareader" + }, + { + "download_count": 718534, + "project": "ngram" + }, + { + "download_count": 718362, + "project": "h2o" + }, + { + "download_count": 717198, + "project": "homeassistant" + }, + { + "download_count": 716605, + "project": "pytest-mypy" + }, + { + "download_count": 716398, + 
"project": "eth-typing" + }, + { + "download_count": 716263, + "project": "django-auth-ldap" + }, + { + "download_count": 714558, + "project": "jsonmerge" + }, + { + "download_count": 714088, + "project": "django-cacheops" + }, + { + "download_count": 713825, + "project": "python-bioformats" + }, + { + "download_count": 713644, + "project": "stomp-py" + }, + { + "download_count": 713346, + "project": "scrypt" + }, + { + "download_count": 710233, + "project": "prokaryote" + }, + { + "download_count": 709352, + "project": "testing-postgresql" + }, + { + "download_count": 708670, + "project": "azure-cli-sqlvm" + }, + { + "download_count": 708401, + "project": "shrub-py" + }, + { + "download_count": 708219, + "project": "django-tinymce" + }, + { + "download_count": 708181, + "project": "scrapyd-client" + }, + { + "download_count": 707527, + "project": "apiclient" + }, + { + "download_count": 707254, + "project": "imgaug" + }, + { + "download_count": 707113, + "project": "nbsphinx" + }, + { + "download_count": 707083, + "project": "waiting" + }, + { + "download_count": 705264, + "project": "colorclass" + }, + { + "download_count": 703706, + "project": "consul-kv" + }, + { + "download_count": 702978, + "project": "html" + }, + { + "download_count": 702738, + "project": "rlp" + }, + { + "download_count": 702351, + "project": "nose-cov" + }, + { + "download_count": 702193, + "project": "python-twitter" + }, + { + "download_count": 701163, + "project": "splunk-sdk" + }, + { + "download_count": 700250, + "project": "fastcluster" + }, + { + "download_count": 698719, + "project": "yamale" + }, + { + "download_count": 698219, + "project": "pyramid-arima" + }, + { + "download_count": 697868, + "project": "termstyle" + }, + { + "download_count": 697474, + "project": "xstatic-bootstrap-scss" + }, + { + "download_count": 695211, + "project": "pyrouge" + }, + { + "download_count": 694603, + "project": "snuggs" + }, + { + "download_count": 693279, + "project": "python-barbicanclient" 
+ }, + { + "download_count": 693249, + "project": "pyaudio" + }, + { + "download_count": 692957, + "project": "cvxpy" + }, + { + "download_count": 692001, + "project": "async-lru" + }, + { + "download_count": 691907, + "project": "mizani" + }, + { + "download_count": 691307, + "project": "petname" + }, + { + "download_count": 691300, + "project": "rouge" + }, + { + "download_count": 689543, + "project": "agate-dbf" + }, + { + "download_count": 688981, + "project": "fastapi" + }, + { + "download_count": 687783, + "project": "category-encoders" + }, + { + "download_count": 687548, + "project": "oyaml" + }, + { + "download_count": 687522, + "project": "gnureadline" + }, + { + "download_count": 687081, + "project": "rake-nltk" + }, + { + "download_count": 686921, + "project": "titlecase" + }, + { + "download_count": 685900, + "project": "robotframework-pabot" + }, + { + "download_count": 685000, + "project": "pygraphviz" + }, + { + "download_count": 684549, + "project": "awesome-slugify" + }, + { + "download_count": 684157, + "project": "ibmiotf" + }, + { + "download_count": 683792, + "project": "cpplint" + }, + { + "download_count": 683191, + "project": "transforms3d" + }, + { + "download_count": 681681, + "project": "junos-eznc" + }, + { + "download_count": 680817, + "project": "edn-format" + }, + { + "download_count": 680484, + "project": "kappa" + }, + { + "download_count": 680439, + "project": "dist-keras" + }, + { + "download_count": 679352, + "project": "wagtail" + }, + { + "download_count": 679107, + "project": "xstatic" + }, + { + "download_count": 678488, + "project": "sparkpost" + }, + { + "download_count": 677907, + "project": "django-configurations" + }, + { + "download_count": 676671, + "project": "warrant" + }, + { + "download_count": 675669, + "project": "coremltools" + }, + { + "download_count": 675660, + "project": "pystemmer" + }, + { + "download_count": 674957, + "project": "piexif" + }, + { + "download_count": 674880, + "project": "xstatic-jquery" 
+ }, + { + "download_count": 674487, + "project": "ebaysdk" + }, + { + "download_count": 672829, + "project": "durationpy" + }, + { + "download_count": 670913, + "project": "odo" + }, + { + "download_count": 670060, + "project": "django-admin-rangefilter" + }, + { + "download_count": 669445, + "project": "pytrie" + }, + { + "download_count": 669083, + "project": "wxpython" + }, + { + "download_count": 667717, + "project": "ovs" + }, + { + "download_count": 667474, + "project": "ecos" + }, + { + "download_count": 666906, + "project": "tinycss" + }, + { + "download_count": 666871, + "project": "osqp" + }, + { + "download_count": 666786, + "project": "eth-hash" + }, + { + "download_count": 666275, + "project": "requirements-parser" + }, + { + "download_count": 665693, + "project": "glom" + }, + { + "download_count": 661492, + "project": "cbor" + }, + { + "download_count": 661312, + "project": "typeguard" + }, + { + "download_count": 660570, + "project": "auth0-python" + }, + { + "download_count": 660013, + "project": "grpcio-opentracing" + }, + { + "download_count": 659377, + "project": "fastcache" + }, + { + "download_count": 659193, + "project": "eth-abi" + }, + { + "download_count": 659114, + "project": "django-modelcluster" + }, + { + "download_count": 657030, + "project": "jgscm" + }, + { + "download_count": 656904, + "project": "xlocal" + }, + { + "download_count": 656475, + "project": "plotnine" + }, + { + "download_count": 655373, + "project": "oslo-reports" + }, + { + "download_count": 654961, + "project": "selectors2" + }, + { + "download_count": 653743, + "project": "pyexcel" + }, + { + "download_count": 653621, + "project": "mongoalchemy" + }, + { + "download_count": 652980, + "project": "django-celery-monitor" + }, + { + "download_count": 652428, + "project": "django-modeltranslation" + }, + { + "download_count": 651995, + "project": "m3-cdecimal" + }, + { + "download_count": 651743, + "project": "django-prometheus" + }, + { + "download_count": 649810, + 
"project": "pylama" + }, + { + "download_count": 649753, + "project": "pygtrie" + }, + { + "download_count": 649300, + "project": "zappa" + }, + { + "download_count": 648596, + "project": "lambda-packages" + }, + { + "download_count": 648298, + "project": "chainmap" + }, + { + "download_count": 648259, + "project": "sqlitedict" + }, + { + "download_count": 646634, + "project": "weakrefmethod" + }, + { + "download_count": 646583, + "project": "pyephem" + }, + { + "download_count": 646316, + "project": "pecan" + }, + { + "download_count": 646192, + "project": "grpcio-testing" + }, + { + "download_count": 645984, + "project": "ptpython" + }, + { + "download_count": 645726, + "project": "uwsgitop" + }, + { + "download_count": 645705, + "project": "xattr" + }, + { + "download_count": 645542, + "project": "sseclient" + }, + { + "download_count": 644773, + "project": "distance" + }, + { + "download_count": 641990, + "project": "crayons" + }, + { + "download_count": 641666, + "project": "scs" + }, + { + "download_count": 641155, + "project": "youtube-dl-server" + }, + { + "download_count": 640583, + "project": "pydicom" + }, + { + "download_count": 640562, + "project": "disklist" + }, + { + "download_count": 640283, + "project": "oslo-versionedobjects" + }, + { + "download_count": 639381, + "project": "property-manager" + }, + { + "download_count": 639343, + "project": "pyramid-tm" + }, + { + "download_count": 638235, + "project": "civis" + }, + { + "download_count": 638153, + "project": "flask-sslify" + }, + { + "download_count": 637064, + "project": "tflearn" + }, + { + "download_count": 635676, + "project": "pygeoif" + }, + { + "download_count": 635375, + "project": "anytree" + }, + { + "download_count": 634585, + "project": "prawcore" + }, + { + "download_count": 633579, + "project": "httmock" + }, + { + "download_count": 633551, + "project": "praw" + }, + { + "download_count": 633536, + "project": "blaze" + }, + { + "download_count": 630085, + "project": 
"dogstatsd-python" + }, + { + "download_count": 629789, + "project": "df2gspread" + }, + { + "download_count": 629728, + "project": "intelhex" + }, + { + "download_count": 628881, + "project": "flask-pymongo" + }, + { + "download_count": 628208, + "project": "ara" + }, + { + "download_count": 628016, + "project": "supervisor-checks" + }, + { + "download_count": 626928, + "project": "portpicker" + }, + { + "download_count": 626822, + "project": "willow" + }, + { + "download_count": 624147, + "project": "django-admin-sortable2" + }, + { + "download_count": 623219, + "project": "py2neo" + }, + { + "download_count": 622538, + "project": "dis3" + }, + { + "download_count": 621132, + "project": "dask-ml" + }, + { + "download_count": 620942, + "project": "doc8" + }, + { + "download_count": 620211, + "project": "duo-client" + }, + { + "download_count": 620141, + "project": "django-rq" + }, + { + "download_count": 619804, + "project": "cronex" + }, + { + "download_count": 619350, + "project": "quandl" + }, + { + "download_count": 616490, + "project": "fpdf" + }, + { + "download_count": 615575, + "project": "dpkt" + }, + { + "download_count": 615407, + "project": "img2pdf" + }, + { + "download_count": 614677, + "project": "twython" + }, + { + "download_count": 612945, + "project": "django-tastypie" + }, + { + "download_count": 612710, + "project": "fastkml" + }, + { + "download_count": 611741, + "project": "pychef" + }, + { + "download_count": 611286, + "project": "pbkdf2" + }, + { + "download_count": 611114, + "project": "envparse" + }, + { + "download_count": 610989, + "project": "pytest-profiling" + }, + { + "download_count": 610971, + "project": "face" + }, + { + "download_count": 609341, + "project": "sphinxcontrib-plantuml" + }, + { + "download_count": 609245, + "project": "pockets" + }, + { + "download_count": 609190, + "project": "pex" + }, + { + "download_count": 607985, + "project": "codacy-coverage" + }, + { + "download_count": 607915, + "project": "smtpapi" + }, 
+ { + "download_count": 607247, + "project": "recordtype" + }, + { + "download_count": 604747, + "project": "django-sekizai" + }, + { + "download_count": 604376, + "project": "glances" + }, + { + "download_count": 603378, + "project": "pysha3" + }, + { + "download_count": 602654, + "project": "sphinxcontrib-napoleon" + }, + { + "download_count": 601446, + "project": "authlib" + }, + { + "download_count": 601374, + "project": "python-intercom" + }, + { + "download_count": 600575, + "project": "flask-limiter" + }, + { + "download_count": 600277, + "project": "python-statsd" + }, + { + "download_count": 599602, + "project": "draftjs-exporter" + }, + { + "download_count": 598699, + "project": "flake8-debugger" + }, + { + "download_count": 598674, + "project": "oslo-upgradecheck" + }, + { + "download_count": 598119, + "project": "libvirt-python" + }, + { + "download_count": 597629, + "project": "cron-descriptor" + }, + { + "download_count": 597332, + "project": "wsproto" + }, + { + "download_count": 597238, + "project": "asyncio-nats-client" + }, + { + "download_count": 597234, + "project": "pytorch-pretrained-bert" + }, + { + "download_count": 597090, + "project": "fixture" + }, + { + "download_count": 596614, + "project": "alpha-vantage" + }, + { + "download_count": 596308, + "project": "edgegrid-python" + }, + { + "download_count": 596233, + "project": "eth-keys" + }, + { + "download_count": 596043, + "project": "impacket" + }, + { + "download_count": 595545, + "project": "win-inet-pton" + }, + { + "download_count": 595350, + "project": "mox3" + }, + { + "download_count": 595102, + "project": "rarfile" + }, + { + "download_count": 593426, + "project": "yarn-api-client" + }, + { + "download_count": 593291, + "project": "colored" + }, + { + "download_count": 592042, + "project": "txaws" + }, + { + "download_count": 591199, + "project": "speechrecognition" + }, + { + "download_count": 591134, + "project": "frozen-flask" + }, + { + "download_count": 590993, + "project": 
"django-log-request-id" + }, + { + "download_count": 589804, + "project": "funcparserlib" + }, + { + "download_count": 589445, + "project": "djangorestframework-camel-case" + }, + { + "download_count": 588165, + "project": "oslo-privsep" + }, + { + "download_count": 587455, + "project": "tf-nightly" + }, + { + "download_count": 587372, + "project": "caniusepython3" + }, + { + "download_count": 586559, + "project": "envtpl" + }, + { + "download_count": 586159, + "project": "mockredispy" + }, + { + "download_count": 586076, + "project": "properties" + }, + { + "download_count": 585723, + "project": "ansi2html" + }, + { + "download_count": 585253, + "project": "pyzipcode" + }, + { + "download_count": 584788, + "project": "sphinx-autodoc-typehints" + }, + { + "download_count": 583551, + "project": "environs" + }, + { + "download_count": 583517, + "project": "junit2html" + }, + { + "download_count": 583339, + "project": "yoyo-migrations" + }, + { + "download_count": 582030, + "project": "junitxml" + }, + { + "download_count": 580290, + "project": "django-heroku" + }, + { + "download_count": 579947, + "project": "chart-studio" + }, + { + "download_count": 579171, + "project": "pyexecjs" + }, + { + "download_count": 578063, + "project": "datasketch" + }, + { + "download_count": 577373, + "project": "django-autoslug" + }, + { + "download_count": 577155, + "project": "pyrepl" + }, + { + "download_count": 576195, + "project": "polygon-geohasher" + }, + { + "download_count": 575933, + "project": "addict" + }, + { + "download_count": 575932, + "project": "tooz" + }, + { + "download_count": 575622, + "project": "mecab-python3" + }, + { + "download_count": 575453, + "project": "shippo" + }, + { + "download_count": 575188, + "project": "bindep" + }, + { + "download_count": 574250, + "project": "requests-html" + }, + { + "download_count": 573651, + "project": "python-louvain" + }, + { + "download_count": 572787, + "project": "zmq" + }, + { + "download_count": 571317, + "project": 
"eth-account" + }, + { + "download_count": 571250, + "project": "ortools" + }, + { + "download_count": 570798, + "project": "automaton" + }, + { + "download_count": 570379, + "project": "django-cors-middleware" + }, + { + "download_count": 570213, + "project": "rq-dashboard" + }, + { + "download_count": 569967, + "project": "oslo-rootwrap" + }, + { + "download_count": 569775, + "project": "pilkit" + }, + { + "download_count": 569584, + "project": "readthedocs-sphinx-ext" + }, + { + "download_count": 569334, + "project": "latexcodec" + }, + { + "download_count": 568887, + "project": "south" + }, + { + "download_count": 568427, + "project": "agate-excel" + }, + { + "download_count": 568046, + "project": "hexbytes" + }, + { + "download_count": 567653, + "project": "django-money" + }, + { + "download_count": 567483, + "project": "agate-sql" + }, + { + "download_count": 566872, + "project": "kitchen" + }, + { + "download_count": 566696, + "project": "unipath" + }, + { + "download_count": 566631, + "project": "sshuttle" + }, + { + "download_count": 566158, + "project": "robotframework-faker" + }, + { + "download_count": 565395, + "project": "pybtex" + }, + { + "download_count": 565136, + "project": "django-nested-admin" + }, + { + "download_count": 564284, + "project": "eth-keyfile" + }, + { + "download_count": 564232, + "project": "djangorestframework-bulk" + }, + { + "download_count": 564010, + "project": "dataset" + }, + { + "download_count": 563254, + "project": "trafaret" + }, + { + "download_count": 562622, + "project": "cheetah3" + }, + { + "download_count": 561733, + "project": "flask-security" + }, + { + "download_count": 560775, + "project": "aliyun-python-sdk-core-v3" + }, + { + "download_count": 560763, + "project": "azureml-train-automl" + }, + { + "download_count": 559850, + "project": "control" + }, + { + "download_count": 559644, + "project": "implicit" + }, + { + "download_count": 559092, + "project": "dependency-injector" + }, + { + "download_count": 
558284, + "project": "lazy" + }, + { + "download_count": 558189, + "project": "unidiff" + }, + { + "download_count": 557350, + "project": "textdistance" + }, + { + "download_count": 557098, + "project": "python-monkey-business" + }, + { + "download_count": 556600, + "project": "untangle" + }, + { + "download_count": 556409, + "project": "reverse-geocoder" + }, + { + "download_count": 556261, + "project": "pygeoip" + }, + { + "download_count": 554953, + "project": "eth-rlp" + }, + { + "download_count": 552622, + "project": "databricks" + }, + { + "download_count": 552459, + "project": "pyvim" + }, + { + "download_count": 551935, + "project": "taskflow" + }, + { + "download_count": 551365, + "project": "ifaddr" + }, + { + "download_count": 549608, + "project": "eeweather" + }, + { + "download_count": 549360, + "project": "clickhouse-cityhash" + }, + { + "download_count": 548549, + "project": "django-hijack" + }, + { + "download_count": 547813, + "project": "names" + }, + { + "download_count": 547796, + "project": "castellan" + }, + { + "download_count": 547711, + "project": "sacremoses" + }, + { + "download_count": 547488, + "project": "flake8-blind-except" + }, + { + "download_count": 547363, + "project": "mozdebug" + }, + { + "download_count": 547215, + "project": "ofxparse" + }, + { + "download_count": 546668, + "project": "vatnumber" + }, + { + "download_count": 546665, + "project": "remoto" + }, + { + "download_count": 546052, + "project": "checksumdir" + }, + { + "download_count": 545735, + "project": "pyowm" + }, + { + "download_count": 545330, + "project": "poster" + }, + { + "download_count": 543997, + "project": "lzstring" + }, + { + "download_count": 543850, + "project": "pyminizip" + }, + { + "download_count": 543634, + "project": "np-utils" + }, + { + "download_count": 543596, + "project": "injector" + }, + { + "download_count": 543183, + "project": "django-imagekit" + }, + { + "download_count": 542497, + "project": "five9" + }, + { + "download_count": 
542414, + "project": "static3" + }, + { + "download_count": 541667, + "project": "oset" + }, + { + "download_count": 540962, + "project": "jsbeautifier" + }, + { + "download_count": 540750, + "project": "hdbscan" + }, + { + "download_count": 540280, + "project": "os-testr" + }, + { + "download_count": 540000, + "project": "flask-babelex" + }, + { + "download_count": 539901, + "project": "positional" + }, + { + "download_count": 539021, + "project": "profilehooks" + }, + { + "download_count": 538332, + "project": "flask-rq2" + }, + { + "download_count": 538314, + "project": "pygpgme" + }, + { + "download_count": 538159, + "project": "ts-flint" + }, + { + "download_count": 538112, + "project": "google-api-helper" + }, + { + "download_count": 537857, + "project": "markuppy" + }, + { + "download_count": 537565, + "project": "keras-mxnet" + }, + { + "download_count": 535795, + "project": "kwargs-only" + }, + { + "download_count": 534335, + "project": "django-mathfilters" + }, + { + "download_count": 534222, + "project": "dj-static" + }, + { + "download_count": 533502, + "project": "web-py" + }, + { + "download_count": 533322, + "project": "zenpy" + }, + { + "download_count": 533300, + "project": "django-enumfields" + }, + { + "download_count": 533281, + "project": "georaptor" + }, + { + "download_count": 533198, + "project": "heroku3" + }, + { + "download_count": 533034, + "project": "oci" + }, + { + "download_count": 532545, + "project": "django-fernet-fields" + }, + { + "download_count": 531368, + "project": "pyftpdlib" + }, + { + "download_count": 529065, + "project": "neutron-lib" + }, + { + "download_count": 529026, + "project": "grpcio-reflection" + }, + { + "download_count": 528753, + "project": "python-jsonschema-objects" + }, + { + "download_count": 528555, + "project": "django-dynamic-fixture" + }, + { + "download_count": 528426, + "project": "pyod" + }, + { + "download_count": 528307, + "project": "simplekml" + }, + { + "download_count": 527593, + "project": 
"overrides" + }, + { + "download_count": 526989, + "project": "ovsdbapp" + }, + { + "download_count": 526603, + "project": "tavern" + }, + { + "download_count": 526180, + "project": "peppercorn" + }, + { + "download_count": 526018, + "project": "cbapi" + }, + { + "download_count": 525952, + "project": "twitter-common-contextutil" + }, + { + "download_count": 523345, + "project": "pypdf" + }, + { + "download_count": 523091, + "project": "couchbase" + }, + { + "download_count": 522723, + "project": "profanityfilter" + }, + { + "download_count": 522269, + "project": "blist" + }, + { + "download_count": 522185, + "project": "pydns" + }, + { + "download_count": 521431, + "project": "stopit" + }, + { + "download_count": 521064, + "project": "keyboard" + }, + { + "download_count": 520346, + "project": "twitter-common-util" + }, + { + "download_count": 520255, + "project": "flatten-json" + }, + { + "download_count": 519427, + "project": "twitter-common-string" + }, + { + "download_count": 519406, + "project": "tableauserverclient" + }, + { + "download_count": 519368, + "project": "m2r" + }, + { + "download_count": 519326, + "project": "twitter-common-process" + }, + { + "download_count": 519222, + "project": "twitter-common-app" + }, + { + "download_count": 518985, + "project": "json-rpc" + }, + { + "download_count": 517770, + "project": "slack-webhook-cli" + }, + { + "download_count": 517297, + "project": "antigate" + }, + { + "download_count": 516754, + "project": "sphinxcontrib-bibtex" + }, + { + "download_count": 516195, + "project": "pybtex-docutils" + }, + { + "download_count": 515133, + "project": "rfc6266-parser" + }, + { + "download_count": 514541, + "project": "nflx-genie-client" + }, + { + "download_count": 513202, + "project": "missingno" + }, + { + "download_count": 513069, + "project": "mitmproxy" + }, + { + "download_count": 512838, + "project": "conan-package-tools" + }, + { + "download_count": 512668, + "project": "xlutils" + }, + { + "download_count": 
512441, + "project": "pprintpp" + }, + { + "download_count": 512440, + "project": "os-traits" + }, + { + "download_count": 512397, + "project": "svglib" + }, + { + "download_count": 510713, + "project": "btrees" + }, + { + "download_count": 510636, + "project": "graphframes" + }, + { + "download_count": 509946, + "project": "sarge" + }, + { + "download_count": 509466, + "project": "shadowsocks" + }, + { + "download_count": 509388, + "project": "hmsclient" + }, + { + "download_count": 509166, + "project": "azure-mgmt-servermanager" + }, + { + "download_count": 508757, + "project": "elasticache-pyclient" + }, + { + "download_count": 508756, + "project": "xstatic-patternfly" + }, + { + "download_count": 508352, + "project": "pep257" + }, + { + "download_count": 508010, + "project": "xstatic-patternfly-bootstrap-treeview" + }, + { + "download_count": 507803, + "project": "xstatic-datatables" + }, + { + "download_count": 507499, + "project": "django-recaptcha" + }, + { + "download_count": 507473, + "project": "persistent" + }, + { + "download_count": 507135, + "project": "altair" + }, + { + "download_count": 505888, + "project": "edx-enterprise" + }, + { + "download_count": 505690, + "project": "graphy" + }, + { + "download_count": 505101, + "project": "redlock-py" + }, + { + "download_count": 504911, + "project": "pymc3" + }, + { + "download_count": 504787, + "project": "mercantile" + }, + { + "download_count": 504175, + "project": "lftools" + }, + { + "download_count": 502985, + "project": "robotframework-httplibrary" + }, + { + "download_count": 501914, + "project": "tsfresh" + }, + { + "download_count": 501627, + "project": "fitbit" + }, + { + "download_count": 501439, + "project": "lightfm" + }, + { + "download_count": 501354, + "project": "djoser" + }, + { + "download_count": 501217, + "project": "pytest-faulthandler" + }, + { + "download_count": 500476, + "project": "formencode" + }, + { + "download_count": 500465, + "project": "spyne" + }, + { + 
"download_count": 500288, + "project": "backports-os" + }, + { + "download_count": 500147, + "project": "customerio" + }, + { + "download_count": 499726, + "project": "os-win" + }, + { + "download_count": 499639, + "project": "neptune-client" + }, + { + "download_count": 499204, + "project": "googleappenginecloudstorageclient" + }, + { + "download_count": 498658, + "project": "sparqlwrapper" + }, + { + "download_count": 498519, + "project": "sphinxcontrib-spelling" + }, + { + "download_count": 498177, + "project": "geotext" + }, + { + "download_count": 497560, + "project": "pytest-lazy-fixture" + }, + { + "download_count": 497085, + "project": "pyarabic" + }, + { + "download_count": 497017, + "project": "auditwheel" + }, + { + "download_count": 496676, + "project": "django-debug-panel" + }, + { + "download_count": 495919, + "project": "cssmin" + }, + { + "download_count": 495656, + "project": "nose-progressive" + }, + { + "download_count": 495187, + "project": "django-suit" + }, + { + "download_count": 495183, + "project": "mercurial" + }, + { + "download_count": 495032, + "project": "python-hosts" + }, + { + "download_count": 494652, + "project": "pywatchman" + }, + { + "download_count": 494192, + "project": "pip-lock" + }, + { + "download_count": 494177, + "project": "clikit" + }, + { + "download_count": 494100, + "project": "flake8-per-file-ignores" + }, + { + "download_count": 493208, + "project": "os-brick" + }, + { + "download_count": 492737, + "project": "cloudinary" + }, + { + "download_count": 492342, + "project": "pyroma" + }, + { + "download_count": 491821, + "project": "aiohttp-jinja2" + }, + { + "download_count": 491668, + "project": "func-timeout" + }, + { + "download_count": 491557, + "project": "ldapdomaindump" + }, + { + "download_count": 490771, + "project": "logzio-python-handler" + }, + { + "download_count": 490651, + "project": "yarg" + }, + { + "download_count": 490261, + "project": "python-geoip" + }, + { + "download_count": 489169, + 
"project": "gremlinpython" + }, + { + "download_count": 488646, + "project": "uplink" + }, + { + "download_count": 487621, + "project": "pyjarowinkler" + }, + { + "download_count": 485859, + "project": "qt4reactor" + }, + { + "download_count": 485712, + "project": "records" + }, + { + "download_count": 485512, + "project": "flake8-string-format" + }, + { + "download_count": 485371, + "project": "django-rest-framework" + }, + { + "download_count": 485084, + "project": "pydruid" + }, + { + "download_count": 484914, + "project": "meson" + }, + { + "download_count": 484556, + "project": "django-select2" + }, + { + "download_count": 484267, + "project": "pamqp" + }, + { + "download_count": 484090, + "project": "xmljson" + }, + { + "download_count": 483920, + "project": "slots" + }, + { + "download_count": 483748, + "project": "doublemetaphone" + }, + { + "download_count": 483545, + "project": "pycli" + }, + { + "download_count": 483354, + "project": "jupyterlab-launcher" + }, + { + "download_count": 482936, + "project": "editorconfig" + }, + { + "download_count": 482719, + "project": "pamela" + }, + { + "download_count": 482539, + "project": "rdpy" + }, + { + "download_count": 482395, + "project": "word2number" + }, + { + "download_count": 482346, + "project": "pykmip" + }, + { + "download_count": 480460, + "project": "recurly" + }, + { + "download_count": 479945, + "project": "datarobot" + }, + { + "download_count": 479251, + "project": "email-reply-parser" + }, + { + "download_count": 479059, + "project": "geohash2" + }, + { + "download_count": 478838, + "project": "readchar" + }, + { + "download_count": 478822, + "project": "mohawk" + }, + { + "download_count": 478394, + "project": "orjson" + }, + { + "download_count": 478032, + "project": "pycocotools" + }, + { + "download_count": 477626, + "project": "pythonnet" + }, + { + "download_count": 477384, + "project": "deap" + }, + { + "download_count": 476311, + "project": "cursor" + }, + { + "download_count": 475480, + 
"project": "django-jenkins" + }, + { + "download_count": 475049, + "project": "azureml-automl-core" + }, + { + "download_count": 474562, + "project": "sklearn-crfsuite" + }, + { + "download_count": 472571, + "project": "azure-mgmt-documentdb" + }, + { + "download_count": 471293, + "project": "paretochart" + }, + { + "download_count": 471137, + "project": "python-debian" + }, + { + "download_count": 471045, + "project": "rply" + }, + { + "download_count": 469934, + "project": "pynliner" + }, + { + "download_count": 469110, + "project": "ipwhois" + }, + { + "download_count": 468984, + "project": "pylint-quotes" + }, + { + "download_count": 468853, + "project": "sfmergeutility" + }, + { + "download_count": 468745, + "project": "pyside2" + }, + { + "download_count": 468673, + "project": "cupy-cuda100" + }, + { + "download_count": 468012, + "project": "tokenize-rt" + }, + { + "download_count": 467174, + "project": "halo" + }, + { + "download_count": 467029, + "project": "pyblake2" + }, + { + "download_count": 466658, + "project": "python-keyczar" + }, + { + "download_count": 466596, + "project": "pytest-factoryboy" + }, + { + "download_count": 466322, + "project": "pyramid-mako" + }, + { + "download_count": 465692, + "project": "speedtest-cli" + }, + { + "download_count": 465559, + "project": "ansible-vault" + }, + { + "download_count": 465439, + "project": "sure" + }, + { + "download_count": 465170, + "project": "h3" + }, + { + "download_count": 464606, + "project": "pysolar" + }, + { + "download_count": 464135, + "project": "os-vif" + }, + { + "download_count": 462962, + "project": "gcovr" + }, + { + "download_count": 462652, + "project": "gputil" + }, + { + "download_count": 462649, + "project": "pyexcel-xlsx" + }, + { + "download_count": 462258, + "project": "pytest-bdd" + }, + { + "download_count": 462062, + "project": "qtpy" + }, + { + "download_count": 461447, + "project": "marshmallow-jsonschema" + }, + { + "download_count": 461130, + "project": "xmlschema" + }, 
+ { + "download_count": 461066, + "project": "log-symbols" + }, + { + "download_count": 461026, + "project": "aiopg" + }, + { + "download_count": 461021, + "project": "paypalrestsdk" + }, + { + "download_count": 459361, + "project": "bpython" + }, + { + "download_count": 459221, + "project": "django-memoize" + }, + { + "download_count": 458741, + "project": "pastescript" + }, + { + "download_count": 458467, + "project": "djangorestframework-gis" + }, + { + "download_count": 458421, + "project": "yamlordereddictloader" + }, + { + "download_count": 458237, + "project": "azure-cli-privatedns" + }, + { + "download_count": 457094, + "project": "jupyterhub" + }, + { + "download_count": 457021, + "project": "pytest-random-order" + }, + { + "download_count": 456889, + "project": "cli-helpers" + }, + { + "download_count": 456492, + "project": "django-jet" + }, + { + "download_count": 456487, + "project": "django-solo" + }, + { + "download_count": 455927, + "project": "easypkg" + }, + { + "download_count": 455745, + "project": "oslotest" + }, + { + "download_count": 455660, + "project": "td-client" + }, + { + "download_count": 455550, + "project": "docker-buildtool" + }, + { + "download_count": 455228, + "project": "pyactiveresource" + }, + { + "download_count": 455148, + "project": "filetype" + }, + { + "download_count": 454275, + "project": "integrationhelper" + }, + { + "download_count": 454060, + "project": "treeinterpreter" + }, + { + "download_count": 453726, + "project": "spinners" + }, + { + "download_count": 453478, + "project": "tinys3" + }, + { + "download_count": 452911, + "project": "google-nucleus" + }, + { + "download_count": 452905, + "project": "sfctl" + }, + { + "download_count": 452659, + "project": "wsme" + }, + { + "download_count": 452548, + "project": "cloudml-hypertune" + }, + { + "download_count": 452284, + "project": "djrill" + }, + { + "download_count": 451894, + "project": "rdflib-jsonld" + }, + { + "download_count": 451751, + "project": "pyhull" 
+ }, + { + "download_count": 451388, + "project": "weka-easypy" + }, + { + "download_count": 451340, + "project": "zerorpc" + }, + { + "download_count": 450074, + "project": "requests-aws-sign" + }, + { + "download_count": 449859, + "project": "apns2" + }, + { + "download_count": 449829, + "project": "pytest-freezegun" + }, + { + "download_count": 449733, + "project": "logentries" + }, + { + "download_count": 449274, + "project": "polling" + }, + { + "download_count": 449144, + "project": "ner" + }, + { + "download_count": 448946, + "project": "pycuber" + }, + { + "download_count": 448187, + "project": "dfply" + }, + { + "download_count": 447960, + "project": "elasticsearch5" + }, + { + "download_count": 447647, + "project": "pyramid-debugtoolbar" + }, + { + "download_count": 447433, + "project": "dohq-artifactory" + }, + { + "download_count": 447042, + "project": "graphyte" + }, + { + "download_count": 446699, + "project": "gtts-token" + }, + { + "download_count": 446599, + "project": "s3io" + }, + { + "download_count": 446457, + "project": "pyldavis" + }, + { + "download_count": 446070, + "project": "dm-xmlsec-binding" + }, + { + "download_count": 445558, + "project": "oslo-vmware" + }, + { + "download_count": 445493, + "project": "mkdocs-minify-plugin" + }, + { + "download_count": 442789, + "project": "systemd-python" + }, + { + "download_count": 441825, + "project": "django-daterange-filter" + }, + { + "download_count": 441288, + "project": "pycld2" + }, + { + "download_count": 441011, + "project": "ffmpy" + }, + { + "download_count": 440747, + "project": "onnxruntime" + }, + { + "download_count": 440442, + "project": "pathmatch" + }, + { + "download_count": 440074, + "project": "beatbox" + }, + { + "download_count": 439695, + "project": "dotmap" + }, + { + "download_count": 439566, + "project": "atari-py" + }, + { + "download_count": 436976, + "project": "pytest-socket" + }, + { + "download_count": 436145, + "project": "matplotlib-venn" + }, + { + 
"download_count": 434595, + "project": "dnslib" + }, + { + "download_count": 434167, + "project": "leveldb" + }, + { + "download_count": 433865, + "project": "django-dirtyfields" + }, + { + "download_count": 433860, + "project": "shiboken2" + }, + { + "download_count": 433596, + "project": "chameleon" + }, + { + "download_count": 433574, + "project": "python-social-auth" + }, + { + "download_count": 433514, + "project": "xunitparser" + }, + { + "download_count": 433494, + "project": "tempest" + }, + { + "download_count": 433330, + "project": "django-extra-views" + }, + { + "download_count": 433032, + "project": "django-sslserver" + }, + { + "download_count": 432924, + "project": "netstorageapi" + }, + { + "download_count": 432577, + "project": "django-bootstrap-form" + }, + { + "download_count": 431716, + "project": "aio-pika" + }, + { + "download_count": 431533, + "project": "curtsies" + }, + { + "download_count": 431368, + "project": "edx-proctoring" + }, + { + "download_count": 429918, + "project": "rules" + }, + { + "download_count": 429501, + "project": "treq" + }, + { + "download_count": 429446, + "project": "python2-pythondialog" + }, + { + "download_count": 429251, + "project": "shopifyapi" + }, + { + "download_count": 429239, + "project": "pyros-genmsg" + }, + { + "download_count": 428668, + "project": "pyros-genpy" + }, + { + "download_count": 427728, + "project": "django-webtest" + }, + { + "download_count": 427374, + "project": "cpp-coveralls" + }, + { + "download_count": 426629, + "project": "hyperloglog" + }, + { + "download_count": 425518, + "project": "pathvalidate" + }, + { + "download_count": 424129, + "project": "marisa-trie" + }, + { + "download_count": 423827, + "project": "graphene-file-upload" + }, + { + "download_count": 423528, + "project": "wurlitzer" + }, + { + "download_count": 423446, + "project": "geoip" + }, + { + "download_count": 423400, + "project": "nameko" + }, + { + "download_count": 422280, + "project": "pipreqs" + }, + { + 
"download_count": 422034, + "project": "airbrake" + }, + { + "download_count": 421423, + "project": "python-barcode" + }, + { + "download_count": 420487, + "project": "featuretools" + }, + { + "download_count": 420463, + "project": "pydes" + }, + { + "download_count": 420080, + "project": "oss2" + }, + { + "download_count": 419064, + "project": "win-unicode-console" + }, + { + "download_count": 418651, + "project": "aiocontextvars" + }, + { + "download_count": 417979, + "project": "flake8-logging-format" + }, + { + "download_count": 417452, + "project": "aiokafka" + }, + { + "download_count": 416219, + "project": "astunparse" + }, + { + "download_count": 414872, + "project": "doit" + }, + { + "download_count": 414706, + "project": "scikit-surprise" + }, + { + "download_count": 414280, + "project": "flask-mysql" + }, + { + "download_count": 414268, + "project": "pygerrit2" + }, + { + "download_count": 412851, + "project": "requests-http-signature" + }, + { + "download_count": 412476, + "project": "django-dotenv" + }, + { + "download_count": 412152, + "project": "ffmpeg-quality-metrics" + }, + { + "download_count": 412022, + "project": "spotify-tensorflow" + }, + { + "download_count": 411026, + "project": "wsgi-intercept" + }, + { + "download_count": 410904, + "project": "breathe" + }, + { + "download_count": 410783, + "project": "google-api-python-client-uritemplate" + }, + { + "download_count": 408750, + "project": "django-ajax-selects" + }, + { + "download_count": 408606, + "project": "websocket" + }, + { + "download_count": 408486, + "project": "healthcheck" + }, + { + "download_count": 408427, + "project": "redo" + }, + { + "download_count": 408117, + "project": "pypiserver" + }, + { + "download_count": 408017, + "project": "localstack-client" + }, + { + "download_count": 407856, + "project": "fastai" + }, + { + "download_count": 407560, + "project": "django-impersonate" + }, + { + "download_count": 407287, + "project": "zipcodes" + }, + { + "download_count": 
407121, + "project": "treelib" + }, + { + "download_count": 407028, + "project": "django-stubs" + }, + { + "download_count": 406712, + "project": "django-two-factor-auth" + }, + { + "download_count": 405396, + "project": "json-delta" + }, + { + "download_count": 405170, + "project": "socketio-client" + }, + { + "download_count": 405065, + "project": "gin-config" + }, + { + "download_count": 405060, + "project": "coverage-badge" + }, + { + "download_count": 404993, + "project": "django-sendgrid-v5" + }, + { + "download_count": 404902, + "project": "shutilwhich" + }, + { + "download_count": 404866, + "project": "flask-redis" + }, + { + "download_count": 404373, + "project": "pep562" + }, + { + "download_count": 404209, + "project": "niet" + }, + { + "download_count": 403508, + "project": "dask-glm" + }, + { + "download_count": 402928, + "project": "evergreen-py" + }, + { + "download_count": 402697, + "project": "zxcvbn" + }, + { + "download_count": 402692, + "project": "dataproperty" + }, + { + "download_count": 402398, + "project": "pygeohash" + }, + { + "download_count": 401062, + "project": "ast" + }, + { + "download_count": 400982, + "project": "pyobjc-core" + }, + { + "download_count": 400958, + "project": "http-ece" + }, + { + "download_count": 400803, + "project": "readline" + }, + { + "download_count": 400450, + "project": "django-elasticsearch-dsl" + }, + { + "download_count": 400436, + "project": "python-xlib" + }, + { + "download_count": 400407, + "project": "flatten-dict" + }, + { + "download_count": 399614, + "project": "gherkin-official" + }, + { + "download_count": 399263, + "project": "elementpath" + }, + { + "download_count": 399214, + "project": "gdal" + }, + { + "download_count": 399000, + "project": "roman" + }, + { + "download_count": 398885, + "project": "click-spinner" + }, + { + "download_count": 398873, + "project": "chalice" + }, + { + "download_count": 398463, + "project": "django-filer" + }, + { + "download_count": 398402, + "project": 
"ldclient-py" + }, + { + "download_count": 398269, + "project": "gtts" + }, + { + "download_count": 397948, + "project": "django-registration" + }, + { + "download_count": 397646, + "project": "collectfast" + }, + { + "download_count": 396999, + "project": "django-jinja" + }, + { + "download_count": 396968, + "project": "eradicate" + }, + { + "download_count": 396714, + "project": "neo4j-driver" + }, + { + "download_count": 396369, + "project": "cybox" + }, + { + "download_count": 396364, + "project": "asgi-redis" + }, + { + "download_count": 396056, + "project": "boto3-type-annotations" + }, + { + "download_count": 395861, + "project": "etcd3gw" + }, + { + "download_count": 395415, + "project": "face-recognition" + }, + { + "download_count": 395184, + "project": "os-xenapi" + }, + { + "download_count": 395153, + "project": "neo4j" + }, + { + "download_count": 394185, + "project": "pytrends" + }, + { + "download_count": 393950, + "project": "grpcio-status" + }, + { + "download_count": 393467, + "project": "sailthru-client" + }, + { + "download_count": 393315, + "project": "repoze-sendmail" + }, + { + "download_count": 393244, + "project": "bayesian-optimization" + }, + { + "download_count": 393069, + "project": "pillow-simd" + }, + { + "download_count": 392655, + "project": "inquirer" + }, + { + "download_count": 391989, + "project": "watson-developer-cloud" + }, + { + "download_count": 391807, + "project": "assertpy" + }, + { + "download_count": 391722, + "project": "chainer" + }, + { + "download_count": 391162, + "project": "aiogithubapi" + }, + { + "download_count": 391117, + "project": "pyclustering" + }, + { + "download_count": 390635, + "project": "django-test-plus" + }, + { + "download_count": 389572, + "project": "azureml-explain-model" + }, + { + "download_count": 389554, + "project": "param" + }, + { + "download_count": 388843, + "project": "smartsheet-python-sdk" + }, + { + "download_count": 388646, + "project": "google-ads" + }, + { + "download_count": 
387346, + "project": "unicode-slugify" + }, + { + "download_count": 387007, + "project": "django-smtp-ssl" + }, + { + "download_count": 386636, + "project": "udatetime" + }, + { + "download_count": 386540, + "project": "pyobjc-framework-cocoa" + }, + { + "download_count": 386296, + "project": "confuse" + }, + { + "download_count": 386037, + "project": "hdfs3" + }, + { + "download_count": 385593, + "project": "moznetwork" + }, + { + "download_count": 385320, + "project": "pydot2" + }, + { + "download_count": 385150, + "project": "djangocms-admin-style" + }, + { + "download_count": 384650, + "project": "pyquaternion" + }, + { + "download_count": 384272, + "project": "xblock" + }, + { + "download_count": 384195, + "project": "flask-talisman" + }, + { + "download_count": 383670, + "project": "paver" + }, + { + "download_count": 383579, + "project": "pytorch-transformers" + }, + { + "download_count": 383499, + "project": "netdisco" + }, + { + "download_count": 383345, + "project": "kivy" + }, + { + "download_count": 383182, + "project": "django-uuidfield" + }, + { + "download_count": 382848, + "project": "jwt" + }, + { + "download_count": 382404, + "project": "logdna" + }, + { + "download_count": 382235, + "project": "relativetimebuilder" + }, + { + "download_count": 381845, + "project": "json2html" + }, + { + "download_count": 381570, + "project": "pytest-helpers-namespace" + }, + { + "download_count": 381409, + "project": "codespell" + }, + { + "download_count": 381241, + "project": "open3d-python" + }, + { + "download_count": 381173, + "project": "aws" + }, + { + "download_count": 381129, + "project": "plyfile" + }, + { + "download_count": 380993, + "project": "py-spy" + }, + { + "download_count": 380964, + "project": "aliyun-python-sdk-kms" + }, + { + "download_count": 380771, + "project": "stix" + }, + { + "download_count": 379960, + "project": "pywebpush" + }, + { + "download_count": 379915, + "project": "paramiko-expect" + }, + { + "download_count": 379467, + 
"project": "face-recognition-models" + }, + { + "download_count": 379302, + "project": "umap-learn" + }, + { + "download_count": 378977, + "project": "cbor2" + }, + { + "download_count": 378025, + "project": "django-redis-sessions" + }, + { + "download_count": 377737, + "project": "pymisp" + }, + { + "download_count": 377661, + "project": "django-test-without-migrations" + }, + { + "download_count": 377526, + "project": "readability-lxml" + }, + { + "download_count": 377300, + "project": "python-jsonrpc-server" + }, + { + "download_count": 377259, + "project": "yara-python" + }, + { + "download_count": 376371, + "project": "scikit-build" + }, + { + "download_count": 376213, + "project": "wasmer" + }, + { + "download_count": 376182, + "project": "django-templated-email" + }, + { + "download_count": 375778, + "project": "www-authenticate" + }, + { + "download_count": 375656, + "project": "plaid-python" + }, + { + "download_count": 375163, + "project": "mixbox" + }, + { + "download_count": 374823, + "project": "fastdiff" + }, + { + "download_count": 374712, + "project": "pyang" + }, + { + "download_count": 373785, + "project": "flake8-tidy-imports" + }, + { + "download_count": 373672, + "project": "dnspython3" + }, + { + "download_count": 373668, + "project": "twitter-common-confluence" + }, + { + "download_count": 373502, + "project": "cursive" + }, + { + "download_count": 372891, + "project": "requests-oauth" + }, + { + "download_count": 372768, + "project": "edx-opaque-keys" + }, + { + "download_count": 372679, + "project": "flake8-mutable" + }, + { + "download_count": 372516, + "project": "docxtpl" + }, + { + "download_count": 372505, + "project": "reloader" + }, + { + "download_count": 371987, + "project": "ibm-cos-sdk" + }, + { + "download_count": 371891, + "project": "python-multipart" + }, + { + "download_count": 371361, + "project": "shodan" + }, + { + "download_count": 370894, + "project": "glance-store" + }, + { + "download_count": 370618, + "project": 
"blobxfer" + }, + { + "download_count": 370307, + "project": "mailchimp" + }, + { + "download_count": 370281, + "project": "amazon-kclpy" + }, + { + "download_count": 369713, + "project": "azure-cli-deploymentmanager" + }, + { + "download_count": 369303, + "project": "cfscrape" + }, + { + "download_count": 369271, + "project": "gabbi" + }, + { + "download_count": 368704, + "project": "docker-registry-client" + }, + { + "download_count": 368627, + "project": "visdom" + }, + { + "download_count": 368133, + "project": "djangosaml2" + }, + { + "download_count": 367774, + "project": "torchfile" + }, + { + "download_count": 367743, + "project": "python-language-server" + }, + { + "download_count": 367741, + "project": "django-registration-redux" + }, + { + "download_count": 366408, + "project": "pypowervm" + }, + { + "download_count": 365959, + "project": "pypubsub" + }, + { + "download_count": 365726, + "project": "flake8-mypy" + }, + { + "download_count": 365550, + "project": "mixer" + }, + { + "download_count": 365313, + "project": "config" + }, + { + "download_count": 365224, + "project": "pytorch" + }, + { + "download_count": 364756, + "project": "py-geohash-any" + }, + { + "download_count": 364330, + "project": "pantsbuild-pants" + }, + { + "download_count": 364200, + "project": "strif" + }, + { + "download_count": 364189, + "project": "pgc-interface" + }, + { + "download_count": 363919, + "project": "pyrasite" + }, + { + "download_count": 363463, + "project": "browsermob-proxy" + }, + { + "download_count": 362770, + "project": "marshmallow-oneofschema" + }, + { + "download_count": 362569, + "project": "python-saml" + }, + { + "download_count": 362447, + "project": "pymc" + }, + { + "download_count": 362409, + "project": "vadersentiment" + }, + { + "download_count": 362107, + "project": "pyxero" + }, + { + "download_count": 361277, + "project": "ccxt" + }, + { + "download_count": 361145, + "project": "executor" + }, + { + "download_count": 360517, + "project": 
"requests-pkcs12" + }, + { + "download_count": 360423, + "project": "instaclone" + }, + { + "download_count": 360015, + "project": "exchangelib" + }, + { + "download_count": 359650, + "project": "lomond" + }, + { + "download_count": 359422, + "project": "mibian" + }, + { + "download_count": 359376, + "project": "sip" + }, + { + "download_count": 358575, + "project": "django-ordered-model" + }, + { + "download_count": 358484, + "project": "eyed3" + }, + { + "download_count": 358443, + "project": "pysendfile" + }, + { + "download_count": 358260, + "project": "nose-testconfig" + }, + { + "download_count": 358034, + "project": "delegator-py" + }, + { + "download_count": 357573, + "project": "currencyconverter" + }, + { + "download_count": 356478, + "project": "backports-lzma" + }, + { + "download_count": 356429, + "project": "p4python" + }, + { + "download_count": 356412, + "project": "zope-index" + }, + { + "download_count": 356169, + "project": "cloudflare" + }, + { + "download_count": 356004, + "project": "cql" + }, + { + "download_count": 355945, + "project": "dacite" + }, + { + "download_count": 355827, + "project": "python-cjson" + }, + { + "download_count": 355794, + "project": "marshmallow-arrow" + }, + { + "download_count": 355729, + "project": "mbstrdecoder" + }, + { + "download_count": 354987, + "project": "urlextract" + }, + { + "download_count": 354886, + "project": "typepy" + }, + { + "download_count": 354885, + "project": "htpasswd" + }, + { + "download_count": 354555, + "project": "mod-wsgi" + }, + { + "download_count": 354506, + "project": "django-cms" + }, + { + "download_count": 353955, + "project": "flask-apscheduler" + }, + { + "download_count": 353201, + "project": "pymobiledetect" + }, + { + "download_count": 353184, + "project": "times" + }, + { + "download_count": 352996, + "project": "zabbix-api" + }, + { + "download_count": 352927, + "project": "bcdoc" + }, + { + "download_count": 352725, + "project": "torchtext" + }, + { + "download_count": 
352313, + "project": "flashtext" + }, + { + "download_count": 351678, + "project": "referer-parser" + }, + { + "download_count": 350758, + "project": "pyexcel-xls" + }, + { + "download_count": 350681, + "project": "edx-drf-extensions" + }, + { + "download_count": 350665, + "project": "falcon-multipart" + }, + { + "download_count": 350619, + "project": "inotify" + }, + { + "download_count": 350184, + "project": "tpot" + }, + { + "download_count": 349490, + "project": "mypy-protobuf" + }, + { + "download_count": 349330, + "project": "pygit2" + }, + { + "download_count": 348567, + "project": "robotbackgroundlogger" + }, + { + "download_count": 348256, + "project": "traces" + }, + { + "download_count": 348166, + "project": "django-extra-fields" + }, + { + "download_count": 348009, + "project": "rook" + }, + { + "download_count": 348008, + "project": "ssh2-python" + }, + { + "download_count": 347979, + "project": "jupytext" + }, + { + "download_count": 347497, + "project": "optunity" + }, + { + "download_count": 347125, + "project": "django-safedelete" + }, + { + "download_count": 347040, + "project": "django-jsonview" + }, + { + "download_count": 347003, + "project": "allure-behave" + }, + { + "download_count": 346883, + "project": "forex-python" + }, + { + "download_count": 346742, + "project": "logger" + }, + { + "download_count": 346329, + "project": "django-choices" + }, + { + "download_count": 345484, + "project": "xdis" + }, + { + "download_count": 345296, + "project": "django-babel" + }, + { + "download_count": 345262, + "project": "parse-accept-language" + }, + { + "download_count": 344856, + "project": "scons" + }, + { + "download_count": 344819, + "project": "klein" + }, + { + "download_count": 344742, + "project": "flask-shell-ipython" + }, + { + "download_count": 344586, + "project": "amqplib" + }, + { + "download_count": 344301, + "project": "betamax" + }, + { + "download_count": 344260, + "project": "flask-basicauth" + }, + { + "download_count": 344021, + 
"project": "pybarcode" + }, + { + "download_count": 343992, + "project": "pytest-json" + }, + { + "download_count": 343912, + "project": "uiautomation" + }, + { + "download_count": 343788, + "project": "pyemd" + }, + { + "download_count": 343547, + "project": "flufl-enum" + }, + { + "download_count": 342092, + "project": "normality" + }, + { + "download_count": 341312, + "project": "osc-placement" + }, + { + "download_count": 340998, + "project": "pytest-parallel" + }, + { + "download_count": 340763, + "project": "crochet" + }, + { + "download_count": 340105, + "project": "proximityhash" + }, + { + "download_count": 339952, + "project": "pyscss" + }, + { + "download_count": 339480, + "project": "python-qpid-proton" + }, + { + "download_count": 339302, + "project": "vtk" + }, + { + "download_count": 338910, + "project": "hmmlearn" + }, + { + "download_count": 338542, + "project": "pyqtwebengine" + }, + { + "download_count": 337957, + "project": "django-watchman" + }, + { + "download_count": 337701, + "project": "python-igraph" + }, + { + "download_count": 337586, + "project": "edxval" + }, + { + "download_count": 337501, + "project": "ibm-cos-sdk-core" + }, + { + "download_count": 337200, + "project": "edx-django-utils" + }, + { + "download_count": 336856, + "project": "ibm-cos-sdk-s3transfer" + }, + { + "download_count": 336294, + "project": "spark-nlp" + }, + { + "download_count": 335964, + "project": "rhea" + }, + { + "download_count": 335873, + "project": "exifread" + }, + { + "download_count": 335709, + "project": "tensorflow-estimator-2-0-preview" + }, + { + "download_count": 335463, + "project": "python-binary-memcached" + }, + { + "download_count": 335218, + "project": "spyder" + }, + { + "download_count": 334977, + "project": "rstr" + }, + { + "download_count": 334204, + "project": "asteval" + }, + { + "download_count": 333818, + "project": "uncompyle6" + }, + { + "download_count": 333754, + "project": "requests-async" + }, + { + "download_count": 333266, + 
"project": "kaitaistruct" + }, + { + "download_count": 332129, + "project": "multiprocessing" + }, + { + "download_count": 332061, + "project": "chromedriver" + }, + { + "download_count": 332013, + "project": "iso-639" + }, + { + "download_count": 331946, + "project": "daiquiri" + }, + { + "download_count": 331588, + "project": "tendo" + }, + { + "download_count": 331525, + "project": "spark-parser" + }, + { + "download_count": 331379, + "project": "setuptools-git-version" + }, + { + "download_count": 331153, + "project": "priority" + }, + { + "download_count": 330940, + "project": "cachelib" + }, + { + "download_count": 330879, + "project": "os-ken" + }, + { + "download_count": 330608, + "project": "microversion-parse" + }, + { + "download_count": 329253, + "project": "django-contrib-comments" + }, + { + "download_count": 329155, + "project": "o365" + }, + { + "download_count": 328801, + "project": "panda" + }, + { + "download_count": 328625, + "project": "ed25519" + }, + { + "download_count": 327877, + "project": "pyxb" + }, + { + "download_count": 327798, + "project": "rest-condition" + }, + { + "download_count": 327008, + "project": "pandavro" + }, + { + "download_count": 326932, + "project": "flask-autoindex" + }, + { + "download_count": 326745, + "project": "jieba3k" + }, + { + "download_count": 326444, + "project": "pipfile" + }, + { + "download_count": 325679, + "project": "js2xml" + }, + { + "download_count": 325610, + "project": "freetype-py" + }, + { + "download_count": 325570, + "project": "sigopt" + }, + { + "download_count": 325566, + "project": "flask-silk" + }, + { + "download_count": 325431, + "project": "pynvim" + }, + { + "download_count": 324936, + "project": "hunspell" + }, + { + "download_count": 324782, + "project": "pytest-localserver" + }, + { + "download_count": 324466, + "project": "genshi" + }, + { + "download_count": 324252, + "project": "pyqtgraph" + }, + { + "download_count": 324239, + "project": "backport-collections" + }, + { + 
"download_count": 324070, + "project": "daemonize" + }, + { + "download_count": 324045, + "project": "pafy" + }, + { + "download_count": 323910, + "project": "pyvcloud" + }, + { + "download_count": 322541, + "project": "imapclient" + }, + { + "download_count": 321480, + "project": "tika" + }, + { + "download_count": 321355, + "project": "simplekv" + }, + { + "download_count": 321196, + "project": "rtslib-fb" + }, + { + "download_count": 321126, + "project": "flake8-colors" + }, + { + "download_count": 321035, + "project": "helper" + }, + { + "download_count": 320909, + "project": "guessit" + }, + { + "download_count": 320580, + "project": "ryu" + }, + { + "download_count": 320316, + "project": "salt" + }, + { + "download_count": 320262, + "project": "flexmock" + }, + { + "download_count": 320230, + "project": "pytils" + }, + { + "download_count": 320212, + "project": "phik" + }, + { + "download_count": 319164, + "project": "sphinx-bootstrap-theme" + }, + { + "download_count": 319042, + "project": "flake8-pep3101" + }, + { + "download_count": 318722, + "project": "turicreate" + }, + { + "download_count": 318705, + "project": "attr" + }, + { + "download_count": 318586, + "project": "spyder-kernels" + }, + { + "download_count": 318398, + "project": "drf-writable-nested" + }, + { + "download_count": 318092, + "project": "future-fstrings" + }, + { + "download_count": 317793, + "project": "python-mistralclient" + }, + { + "download_count": 317688, + "project": "fuzzy" + }, + { + "download_count": 317529, + "project": "pyxlsb" + }, + { + "download_count": 317467, + "project": "twitter" + }, + { + "download_count": 317447, + "project": "slumber" + }, + { + "download_count": 316898, + "project": "protobuf-to-dict" + }, + { + "download_count": 316783, + "project": "djangorestframework-recursive" + }, + { + "download_count": 316760, + "project": "treeherder-client" + }, + { + "download_count": 316758, + "project": "python-nomad" + }, + { + "download_count": 316352, + 
"project": "click-default-group" + }, + { + "download_count": 316307, + "project": "logzero" + }, + { + "download_count": 316290, + "project": "orionsdk" + }, + { + "download_count": 316243, + "project": "sanic-cors" + }, + { + "download_count": 316239, + "project": "fastdtw" + }, + { + "download_count": 315929, + "project": "python-moztelemetry" + }, + { + "download_count": 315911, + "project": "pytest-azurepipelines" + }, + { + "download_count": 315673, + "project": "expects" + }, + { + "download_count": 314691, + "project": "feedfinder2" + }, + { + "download_count": 314446, + "project": "multimethod" + }, + { + "download_count": 314259, + "project": "janome" + }, + { + "download_count": 314133, + "project": "voluptuous-serialize" + }, + { + "download_count": 314097, + "project": "pyculiar" + }, + { + "download_count": 314051, + "project": "mozdownload" + }, + { + "download_count": 313826, + "project": "pylzma" + }, + { + "download_count": 313796, + "project": "qtawesome" + }, + { + "download_count": 313736, + "project": "everett" + }, + { + "download_count": 313653, + "project": "coincurve" + }, + { + "download_count": 313244, + "project": "characteristic" + }, + { + "download_count": 312696, + "project": "python-can" + }, + { + "download_count": 312614, + "project": "planout" + }, + { + "download_count": 312044, + "project": "submit50" + }, + { + "download_count": 312044, + "project": "transformers" + }, + { + "download_count": 311745, + "project": "django-celery-email" + }, + { + "download_count": 311632, + "project": "check50" + }, + { + "download_count": 311531, + "project": "ansimarkup" + }, + { + "download_count": 311273, + "project": "flatdict" + }, + { + "download_count": 311140, + "project": "minimal-snowplow-tracker" + }, + { + "download_count": 311122, + "project": "python-troveclient" + }, + { + "download_count": 310826, + "project": "pycpfcnpj" + }, + { + "download_count": 310446, + "project": "python-lzf" + }, + { + "download_count": 310429, + 
"project": "apsw" + }, + { + "download_count": 310269, + "project": "stem" + }, + { + "download_count": 310019, + "project": "mozinstall" + }, + { + "download_count": 309655, + "project": "os-resource-classes" + }, + { + "download_count": 309355, + "project": "mimeparse" + }, + { + "download_count": 309293, + "project": "comet-ml" + }, + { + "download_count": 309286, + "project": "serpy" + }, + { + "download_count": 309092, + "project": "skimage" + }, + { + "download_count": 308894, + "project": "pandas-ml" + }, + { + "download_count": 308548, + "project": "python-magnumclient" + }, + { + "download_count": 307984, + "project": "azure-devtools" + }, + { + "download_count": 307690, + "project": "typesentry" + }, + { + "download_count": 307277, + "project": "awslogs" + }, + { + "download_count": 306928, + "project": "pytest-flakes" + }, + { + "download_count": 306784, + "project": "thespian" + }, + { + "download_count": 305826, + "project": "pykcs11" + }, + { + "download_count": 305226, + "project": "singer-python" + }, + { + "download_count": 304755, + "project": "pyprind" + }, + { + "download_count": 304717, + "project": "abbyy" + }, + { + "download_count": 304490, + "project": "flask-restful-swagger" + }, + { + "download_count": 304399, + "project": "os-api-ref" + }, + { + "download_count": 304195, + "project": "simpleitk" + }, + { + "download_count": 304060, + "project": "unicorn" + }, + { + "download_count": 304021, + "project": "jobspy" + }, + { + "download_count": 303998, + "project": "devpi-common" + }, + { + "download_count": 303970, + "project": "jsonpath" + }, + { + "download_count": 303806, + "project": "pysubnettree" + }, + { + "download_count": 303693, + "project": "hypercorn" + }, + { + "download_count": 303592, + "project": "scrapy-random-useragent" + }, + { + "download_count": 303497, + "project": "zope-schema" + }, + { + "download_count": 303260, + "project": "newspaper3k" + }, + { + "download_count": 302739, + "project": "pyspellchecker" + }, + { + 
"download_count": 302714, + "project": "password" + }, + { + "download_count": 302400, + "project": "testlink-api-python-client" + }, + { + "download_count": 302299, + "project": "dogpile-core" + }, + { + "download_count": 302266, + "project": "nilearn" + }, + { + "download_count": 302076, + "project": "pylibftdi" + }, + { + "download_count": 301868, + "project": "python-termstyle" + }, + { + "download_count": 301830, + "project": "pybreaker" + }, + { + "download_count": 301435, + "project": "django-wkhtmltopdf" + }, + { + "download_count": 300585, + "project": "pyxdameraulevenshtein" + }, + { + "download_count": 300425, + "project": "hpsklearn" + }, + { + "download_count": 300421, + "project": "tesserocr" + }, + { + "download_count": 300359, + "project": "django-templated-mail" + }, + { + "download_count": 300207, + "project": "comet-git-pure" + }, + { + "download_count": 299910, + "project": "httpcore" + }, + { + "download_count": 299706, + "project": "simhash" + }, + { + "download_count": 299276, + "project": "aspy-refactor-imports" + }, + { + "download_count": 298943, + "project": "fcm-django" + }, + { + "download_count": 298927, + "project": "flask-jwt" + }, + { + "download_count": 298823, + "project": "serial" + }, + { + "download_count": 298802, + "project": "binary" + }, + { + "download_count": 298544, + "project": "plaidml" + }, + { + "download_count": 298085, + "project": "python-oauth2" + }, + { + "download_count": 297969, + "project": "opencv-contrib-python-headless" + }, + { + "download_count": 297585, + "project": "djangocms-text-ckeditor" + }, + { + "download_count": 297361, + "project": "better-exceptions-fork" + }, + { + "download_count": 297253, + "project": "dynamodb-json" + }, + { + "download_count": 297052, + "project": "bitmath" + }, + { + "download_count": 296269, + "project": "condor-git-config" + }, + { + "download_count": 296162, + "project": "cornice" + }, + { + "download_count": 295986, + "project": "polyglot" + }, + { + 
"download_count": 295722, + "project": "pytelegrambotapi" + }, + { + "download_count": 295667, + "project": "mbed-cloud-sdk" + }, + { + "download_count": 295592, + "project": "behave-django" + }, + { + "download_count": 295509, + "project": "modernize" + }, + { + "download_count": 295419, + "project": "libusb1" + }, + { + "download_count": 295355, + "project": "edx-organizations" + }, + { + "download_count": 294743, + "project": "sendgrid-django" + }, + { + "download_count": 294453, + "project": "sniffio" + }, + { + "download_count": 294364, + "project": "slugid" + }, + { + "download_count": 294093, + "project": "pypika" + }, + { + "download_count": 293799, + "project": "oci-cli" + }, + { + "download_count": 293404, + "project": "django-rosetta" + }, + { + "download_count": 293277, + "project": "proxmoxer" + }, + { + "download_count": 292761, + "project": "anytemplate" + }, + { + "download_count": 292649, + "project": "raven-aiohttp" + }, + { + "download_count": 292327, + "project": "bbcode" + }, + { + "download_count": 292281, + "project": "protego" + }, + { + "download_count": 292277, + "project": "securesystemslib" + }, + { + "download_count": 292249, + "project": "outcome" + }, + { + "download_count": 291695, + "project": "crontab" + }, + { + "download_count": 291636, + "project": "pytelegraf" + }, + { + "download_count": 291495, + "project": "pylbfgs" + }, + { + "download_count": 291341, + "project": "asttokens" + }, + { + "download_count": 291275, + "project": "wtforms-components" + }, + { + "download_count": 291039, + "project": "elasticsearch-async" + }, + { + "download_count": 290811, + "project": "py-dateutil" + }, + { + "download_count": 290793, + "project": "buildbot-worker" + }, + { + "download_count": 290753, + "project": "atpublic" + }, + { + "download_count": 290628, + "project": "django-cleanup" + }, + { + "download_count": 290574, + "project": "urlopen" + }, + { + "download_count": 290457, + "project": "cleanco" + }, + { + "download_count": 
290025, + "project": "home-assistant-frontend" + }, + { + "download_count": 289983, + "project": "azureml-widgets" + }, + { + "download_count": 289907, + "project": "pycallgraph" + }, + { + "download_count": 289633, + "project": "biplist" + }, + { + "download_count": 289587, + "project": "django-datatables-view" + }, + { + "download_count": 289573, + "project": "guppy" + }, + { + "download_count": 289366, + "project": "kaggle" + }, + { + "download_count": 289053, + "project": "ratelimiter" + }, + { + "download_count": 288392, + "project": "requests-aws" + }, + { + "download_count": 288145, + "project": "prov" + }, + { + "download_count": 288066, + "project": "xmodem" + }, + { + "download_count": 287756, + "project": "pyobjc-framework-fsevents" + }, + { + "download_count": 287736, + "project": "djangorestframework-stubs" + }, + { + "download_count": 287716, + "project": "dailymotion" + }, + { + "download_count": 287610, + "project": "airspeed" + }, + { + "download_count": 287211, + "project": "pdfminer3k" + }, + { + "download_count": 286932, + "project": "django-admin-tools" + }, + { + "download_count": 286676, + "project": "rfc3339" + }, + { + "download_count": 286568, + "project": "runlike" + }, + { + "download_count": 286494, + "project": "pyobjc-framework-systemconfiguration" + }, + { + "download_count": 286287, + "project": "flask-swagger-ui" + }, + { + "download_count": 286286, + "project": "pyrabbit" + }, + { + "download_count": 286217, + "project": "pyobjc-framework-cfnetwork" + }, + { + "download_count": 285962, + "project": "django-htmlmin" + }, + { + "download_count": 285937, + "project": "affinegap" + }, + { + "download_count": 285640, + "project": "django-smart-selects" + }, + { + "download_count": 285368, + "project": "jaraco-classes" + }, + { + "download_count": 285182, + "project": "pyjq" + }, + { + "download_count": 284862, + "project": "plaidml-keras" + }, + { + "download_count": 284806, + "project": "pyobjc-framework-webkit" + }, + { + 
"download_count": 284790, + "project": "jq" + }, + { + "download_count": 284781, + "project": "django-taggit-serializer" + }, + { + "download_count": 284424, + "project": "robotframework-databaselibrary" + }, + { + "download_count": 284410, + "project": "httpsig-cffi" + }, + { + "download_count": 284050, + "project": "instaloader" + }, + { + "download_count": 284049, + "project": "powerline-status" + }, + { + "download_count": 283986, + "project": "tap-py" + }, + { + "download_count": 283939, + "project": "devpi-client" + }, + { + "download_count": 283785, + "project": "banal" + }, + { + "download_count": 283663, + "project": "docx" + }, + { + "download_count": 283563, + "project": "python-geoip-geolite2" + }, + { + "download_count": 283441, + "project": "bitstruct" + }, + { + "download_count": 283402, + "project": "pyramid-jinja2" + }, + { + "download_count": 283279, + "project": "graphitesend" + }, + { + "download_count": 283227, + "project": "metafone" + }, + { + "download_count": 283149, + "project": "tinysegmenter" + }, + { + "download_count": 282747, + "project": "sqlalchemy-continuum" + }, + { + "download_count": 282696, + "project": "opencensus-ext-stackdriver" + }, + { + "download_count": 282668, + "project": "waiter" + }, + { + "download_count": 282655, + "project": "sphinx-gallery" + }, + { + "download_count": 282575, + "project": "git-pylint-commit-hook" + }, + { + "download_count": 282479, + "project": "fuzzyset" + }, + { + "download_count": 282254, + "project": "pytest-custom-exit-code" + }, + { + "download_count": 281823, + "project": "hyperas" + }, + { + "download_count": 281726, + "project": "django-simple-captcha" + }, + { + "download_count": 281640, + "project": "dynamodb-encryption-sdk" + }, + { + "download_count": 281597, + "project": "openexr" + }, + { + "download_count": 281522, + "project": "pid" + }, + { + "download_count": 281467, + "project": "irc3-plugins-test" + }, + { + "download_count": 280788, + "project": "murmurhash3" + }, + { + 
"download_count": 280402, + "project": "quart" + }, + { + "download_count": 280081, + "project": "salesforce-bulkipy" + }, + { + "download_count": 279935, + "project": "sphinx-argparse" + }, + { + "download_count": 279690, + "project": "pptree" + }, + { + "download_count": 279227, + "project": "djangorestframework-jsonapi" + }, + { + "download_count": 279117, + "project": "marshmallow-polyfield" + }, + { + "download_count": 278996, + "project": "tls-syslog" + }, + { + "download_count": 278801, + "project": "fastprogress" + }, + { + "download_count": 278661, + "project": "style" + }, + { + "download_count": 278616, + "project": "pyjsparser" + }, + { + "download_count": 278381, + "project": "celery-redbeat" + }, + { + "download_count": 278041, + "project": "dbutils" + }, + { + "download_count": 277922, + "project": "zvmcloudconnector" + }, + { + "download_count": 277703, + "project": "blockdiag" + }, + { + "download_count": 277555, + "project": "jsl" + }, + { + "download_count": 277355, + "project": "aiomysql" + }, + { + "download_count": 277155, + "project": "softlayer" + }, + { + "download_count": 276993, + "project": "levenshtein-search" + }, + { + "download_count": 276886, + "project": "gender-guesser" + }, + { + "download_count": 276825, + "project": "msal" + }, + { + "download_count": 276567, + "project": "sqlalchemy-stubs" + }, + { + "download_count": 276536, + "project": "pyliblzma" + }, + { + "download_count": 276486, + "project": "django-sass-processor" + }, + { + "download_count": 276464, + "project": "django-url-filter" + }, + { + "download_count": 276353, + "project": "sanic-plugins-framework" + }, + { + "download_count": 276240, + "project": "jxmlease" + }, + { + "download_count": 275861, + "project": "purl" + }, + { + "download_count": 275254, + "project": "base36" + }, + { + "download_count": 275159, + "project": "pytools" + }, + { + "download_count": 275147, + "project": "datrie" + }, + { + "download_count": 274643, + "project": "zxcvbn-python" + }, 
+ { + "download_count": 274395, + "project": "pytest-datafiles" + }, + { + "download_count": 273920, + "project": "pyspark-stubs" + }, + { + "download_count": 273728, + "project": "natto-py" + }, + { + "download_count": 273719, + "project": "mechanicalsoup" + }, + { + "download_count": 273603, + "project": "sqlalchemy-postgres-copy" + }, + { + "download_count": 273574, + "project": "pycosat" + }, + { + "download_count": 273348, + "project": "q" + }, + { + "download_count": 273202, + "project": "backpack" + }, + { + "download_count": 273056, + "project": "gmplot" + }, + { + "download_count": 273050, + "project": "websockify" + }, + { + "download_count": 273001, + "project": "measurement" + }, + { + "download_count": 272990, + "project": "hass-nabucasa" + }, + { + "download_count": 272948, + "project": "virtualenvwrapper-win" + }, + { + "download_count": 272942, + "project": "email" + }, + { + "download_count": 272542, + "project": "pyobjc-framework-launchservices" + }, + { + "download_count": 272383, + "project": "webdriver-manager" + }, + { + "download_count": 272315, + "project": "google-oauth" + }, + { + "download_count": 272029, + "project": "django-js-reverse" + }, + { + "download_count": 271929, + "project": "meinheld" + }, + { + "download_count": 271914, + "project": "yapsy" + }, + { + "download_count": 271877, + "project": "nteract-scrapbook" + }, + { + "download_count": 271874, + "project": "mouseinfo" + }, + { + "download_count": 271864, + "project": "pyobjc-framework-exceptionhandling" + }, + { + "download_count": 271786, + "project": "dbt" + }, + { + "download_count": 271483, + "project": "django-tagging" + }, + { + "download_count": 271439, + "project": "taskcluster" + }, + { + "download_count": 271349, + "project": "evdev" + }, + { + "download_count": 270918, + "project": "dedupe-hcluster" + }, + { + "download_count": 270898, + "project": "tensor2tensor" + }, + { + "download_count": 270014, + "project": "pymacaroons" + }, + { + "download_count": 
269770, + "project": "kivy-garden" + }, + { + "download_count": 269533, + "project": "nine" + }, + { + "download_count": 269249, + "project": "highered" + }, + { + "download_count": 269216, + "project": "sounddevice" + }, + { + "download_count": 268421, + "project": "docx2txt" + }, + { + "download_count": 268411, + "project": "robotframework-debuglibrary" + }, + { + "download_count": 268172, + "project": "aioamqp" + }, + { + "download_count": 268107, + "project": "cma" + }, + { + "download_count": 267772, + "project": "netstruct" + }, + { + "download_count": 267766, + "project": "pyhacrf-datamade" + }, + { + "download_count": 267588, + "project": "flake8-junit-report" + }, + { + "download_count": 267292, + "project": "wptools" + }, + { + "download_count": 266807, + "project": "bump2version" + }, + { + "download_count": 266733, + "project": "lesscpy" + }, + { + "download_count": 266561, + "project": "pytest-vcr" + }, + { + "download_count": 266544, + "project": "pyexcel-webio" + }, + { + "download_count": 266422, + "project": "maya" + }, + { + "download_count": 266355, + "project": "robotframework-xvfb" + }, + { + "download_count": 266132, + "project": "dedupe" + }, + { + "download_count": 266017, + "project": "pyminifier" + }, + { + "download_count": 265818, + "project": "winkerberos" + }, + { + "download_count": 265798, + "project": "mozanalysis" + }, + { + "download_count": 265437, + "project": "username-generator" + }, + { + "download_count": 265328, + "project": "phpserialize" + }, + { + "download_count": 265105, + "project": "crc32c" + }, + { + "download_count": 264933, + "project": "pretrainedmodels" + }, + { + "download_count": 264845, + "project": "pytest-remotedata" + }, + { + "download_count": 264729, + "project": "python-owasp-zap-v2-4" + }, + { + "download_count": 264669, + "project": "nexpose" + }, + { + "download_count": 264414, + "project": "http-parser" + }, + { + "download_count": 264412, + "project": "pyobjc-framework-diskarbitration" + }, + { + 
"download_count": 264322, + "project": "dsp3" + }, + { + "download_count": 264189, + "project": "rlr" + }, + { + "download_count": 263902, + "project": "pyqt5-tools" + }, + { + "download_count": 263840, + "project": "json-tricks" + }, + { + "download_count": 263390, + "project": "categorical-distance" + }, + { + "download_count": 263282, + "project": "datalab" + }, + { + "download_count": 263021, + "project": "update" + }, + { + "download_count": 262783, + "project": "blobfile" + }, + { + "download_count": 262644, + "project": "zc-buildout" + }, + { + "download_count": 262529, + "project": "dedupe-variable-datetime" + }, + { + "download_count": 262152, + "project": "simplecosine" + }, + { + "download_count": 261988, + "project": "pytest-mockito" + }, + { + "download_count": 261860, + "project": "django-otp-twilio" + }, + { + "download_count": 261797, + "project": "django-chartit" + }, + { + "download_count": 261611, + "project": "datetime-distance" + }, + { + "download_count": 260878, + "project": "jaraco-text" + }, + { + "download_count": 260837, + "project": "fastrlock" + }, + { + "download_count": 260816, + "project": "flake8-future-import" + }, + { + "download_count": 260795, + "project": "pyghmi" + }, + { + "download_count": 260576, + "project": "orator" + }, + { + "download_count": 260536, + "project": "flake8-tuple" + }, + { + "download_count": 260250, + "project": "aiocache" + }, + { + "download_count": 260202, + "project": "cli53" + }, + { + "download_count": 260043, + "project": "untokenize" + }, + { + "download_count": 259904, + "project": "newrelic-plugin-agent" + }, + { + "download_count": 259773, + "project": "pyangbind" + }, + { + "download_count": 259756, + "project": "django-pyodbc-azure" + }, + { + "download_count": 259273, + "project": "zstd" + }, + { + "download_count": 258974, + "project": "pymodbus" + }, + { + "download_count": 258942, + "project": "jupyter-spark" + }, + { + "download_count": 258875, + "project": "django-sortedm2m" + }, + { + 
"download_count": 258300, + "project": "python-logstash-async" + }, + { + "download_count": 258254, + "project": "django-graphql-jwt" + }, + { + "download_count": 257389, + "project": "elasticquery" + }, + { + "download_count": 257227, + "project": "python-keycloak" + }, + { + "download_count": 257086, + "project": "dbus-python" + }, + { + "download_count": 257005, + "project": "cmarkgfm" + }, + { + "download_count": 256972, + "project": "pysrt" + }, + { + "download_count": 256801, + "project": "pyobjc-framework-coreservices" + }, + { + "download_count": 256683, + "project": "django-paypal" + }, + { + "download_count": 256576, + "project": "spur" + }, + { + "download_count": 256447, + "project": "iniparse" + }, + { + "download_count": 256111, + "project": "python-terraform" + }, + { + "download_count": 255860, + "project": "djangorestframework-jsonp" + }, + { + "download_count": 255835, + "project": "rethinkdb" + }, + { + "download_count": 255719, + "project": "mozcrash" + }, + { + "download_count": 255201, + "project": "pyobjc-framework-quartz" + }, + { + "download_count": 254935, + "project": "django-organizations" + }, + { + "download_count": 254677, + "project": "django-colorfield" + }, + { + "download_count": 254646, + "project": "marshmallow-jsonapi" + }, + { + "download_count": 254107, + "project": "djangorestframework-expander" + }, + { + "download_count": 253885, + "project": "dci-utils" + }, + { + "download_count": 253884, + "project": "pql" + }, + { + "download_count": 253867, + "project": "tf-nightly-2-0-preview" + }, + { + "download_count": 253608, + "project": "django-parler" + }, + { + "download_count": 253475, + "project": "telethon" + }, + { + "download_count": 253099, + "project": "celery-once" + }, + { + "download_count": 253054, + "project": "scales" + }, + { + "download_count": 253035, + "project": "rocketchat-api" + }, + { + "download_count": 252896, + "project": "jaraco-collections" + }, + { + "download_count": 252760, + "project": "yaql" + 
}, + { + "download_count": 252588, + "project": "pyinquirer" + }, + { + "download_count": 252471, + "project": "django-session-security" + }, + { + "download_count": 252413, + "project": "django-rest-knox" + }, + { + "download_count": 252295, + "project": "django-redshift-backend" + }, + { + "download_count": 251901, + "project": "sphinx-markdown-tables" + }, + { + "download_count": 251862, + "project": "sceptre" + }, + { + "download_count": 251840, + "project": "py-mini-racer" + }, + { + "download_count": 251759, + "project": "python-rake" + }, + { + "download_count": 251594, + "project": "oauth2-client" + }, + { + "download_count": 251347, + "project": "env" + }, + { + "download_count": 251337, + "project": "timedelta" + }, + { + "download_count": 250784, + "project": "awkward" + }, + { + "download_count": 250362, + "project": "edx-rbac" + }, + { + "download_count": 250192, + "project": "flask-log-request-id" + }, + { + "download_count": 250110, + "project": "globre" + }, + { + "download_count": 249752, + "project": "django-easy-pdf" + }, + { + "download_count": 249646, + "project": "prettyexc" + }, + { + "download_count": 249416, + "project": "django-notifications-hq" + }, + { + "download_count": 249316, + "project": "mozleak" + }, + { + "download_count": 249286, + "project": "autograd-gamma" + }, + { + "download_count": 249216, + "project": "flask-injector" + }, + { + "download_count": 249101, + "project": "holoviews" + }, + { + "download_count": 249064, + "project": "inflector" + }, + { + "download_count": 248895, + "project": "django-honeypot" + }, + { + "download_count": 248839, + "project": "pip-api" + }, + { + "download_count": 248670, + "project": "pytest-testmon" + }, + { + "download_count": 248527, + "project": "pycapnp" + }, + { + "download_count": 248395, + "project": "pgpy" + }, + { + "download_count": 248134, + "project": "pretend" + }, + { + "download_count": 247952, + "project": "webhelpers" + }, + { + "download_count": 247612, + "project": 
"iso4217" + }, + { + "download_count": 247588, + "project": "chargebee" + }, + { + "download_count": 247194, + "project": "logging-tree" + }, + { + "download_count": 247097, + "project": "bcolz" + }, + { + "download_count": 247095, + "project": "pydomo" + }, + { + "download_count": 247093, + "project": "pyviz-comms" + }, + { + "download_count": 246905, + "project": "pyes" + }, + { + "download_count": 246637, + "project": "patool" + }, + { + "download_count": 246609, + "project": "django-saml2-auth" + }, + { + "download_count": 246442, + "project": "lorem" + }, + { + "download_count": 246345, + "project": "kociemba" + }, + { + "download_count": 245924, + "project": "nylas" + }, + { + "download_count": 245599, + "project": "urlparse3" + }, + { + "download_count": 245592, + "project": "pytest-tornado" + }, + { + "download_count": 245425, + "project": "inject" + }, + { + "download_count": 244242, + "project": "tabledata" + }, + { + "download_count": 244197, + "project": "percy" + }, + { + "download_count": 243680, + "project": "snitun" + }, + { + "download_count": 243665, + "project": "django-debug-toolbar-line-profiler" + }, + { + "download_count": 243077, + "project": "bottlenose" + }, + { + "download_count": 242781, + "project": "infi-clickhouse-orm" + }, + { + "download_count": 242659, + "project": "reppy" + }, + { + "download_count": 242378, + "project": "in-toto" + }, + { + "download_count": 242112, + "project": "azureml" + }, + { + "download_count": 242067, + "project": "django-common-helpers" + }, + { + "download_count": 241994, + "project": "django-hijack-admin" + }, + { + "download_count": 241868, + "project": "cmreshandler" + }, + { + "download_count": 241645, + "project": "ruptures" + }, + { + "download_count": 241594, + "project": "goslate" + }, + { + "download_count": 241370, + "project": "aggdraw" + }, + { + "download_count": 241223, + "project": "django-boto" + }, + { + "download_count": 240546, + "project": "svn" + }, + { + "download_count": 240121, + 
"project": "ssh" + }, + { + "download_count": 240049, + "project": "py3dns" + }, + { + "download_count": 239971, + "project": "pymonkey" + }, + { + "download_count": 239838, + "project": "great-expectations" + }, + { + "download_count": 239830, + "project": "pip-custom-platform" + }, + { + "download_count": 239729, + "project": "django-libsass" + }, + { + "download_count": 239683, + "project": "mirakuru" + }, + { + "download_count": 239680, + "project": "microsoftgraph-python" + }, + { + "download_count": 239524, + "project": "gnocchiclient" + }, + { + "download_count": 239407, + "project": "pyct" + }, + { + "download_count": 239390, + "project": "ansible-runner" + }, + { + "download_count": 239360, + "project": "dbt-core" + }, + { + "download_count": 239183, + "project": "hellosign-python-sdk" + }, + { + "download_count": 239095, + "project": "pyaudioanalysis" + }, + { + "download_count": 239001, + "project": "reportportal-client" + }, + { + "download_count": 238983, + "project": "itunes-iap" + }, + { + "download_count": 238603, + "project": "terminalone" + }, + { + "download_count": 238597, + "project": "snaptime" + }, + { + "download_count": 238394, + "project": "aiormq" + }, + { + "download_count": 238154, + "project": "djangocms-attributes-field" + }, + { + "download_count": 238141, + "project": "django-versatileimagefield" + }, + { + "download_count": 237972, + "project": "django-push-notifications" + }, + { + "download_count": 237750, + "project": "transliterate" + }, + { + "download_count": 237652, + "project": "whaaaaat" + }, + { + "download_count": 237622, + "project": "django-sslify" + }, + { + "download_count": 237558, + "project": "towncrier" + }, + { + "download_count": 237018, + "project": "py-lz4framed" + }, + { + "download_count": 236912, + "project": "uproot-methods" + }, + { + "download_count": 236619, + "project": "django-statici18n" + }, + { + "download_count": 236529, + "project": "pytd" + }, + { + "download_count": 236270, + "project": 
"pep517" + }, + { + "download_count": 236180, + "project": "py-ecc" + }, + { + "download_count": 236180, + "project": "layered-yaml-attrdict-config" + }, + { + "download_count": 235952, + "project": "varint" + }, + { + "download_count": 235921, + "project": "spotipy" + }, + { + "download_count": 235732, + "project": "django-markdown-deux" + }, + { + "download_count": 235635, + "project": "geventhttpclient-wheels" + }, + { + "download_count": 235481, + "project": "parallel-ssh" + }, + { + "download_count": 235241, + "project": "event-tracking" + }, + { + "download_count": 234835, + "project": "jupyterthemes" + }, + { + "download_count": 234721, + "project": "django-pandas" + }, + { + "download_count": 234582, + "project": "stackprinter" + }, + { + "download_count": 234393, + "project": "probablepeople" + }, + { + "download_count": 234334, + "project": "flake8-eradicate" + }, + { + "download_count": 234277, + "project": "mode" + }, + { + "download_count": 234271, + "project": "asset" + }, + { + "download_count": 234150, + "project": "loggly-python-handler" + }, + { + "download_count": 233705, + "project": "supervisor-wildcards" + }, + { + "download_count": 233601, + "project": "edx-bulk-grades" + }, + { + "download_count": 233407, + "project": "glean-parser" + }, + { + "download_count": 233242, + "project": "morfessor" + }, + { + "download_count": 233191, + "project": "pyzbar" + }, + { + "download_count": 232874, + "project": "nbstripout" + }, + { + "download_count": 232838, + "project": "mnemonic" + }, + { + "download_count": 232704, + "project": "pyeclib" + }, + { + "download_count": 232607, + "project": "flask-sockets" + }, + { + "download_count": 232578, + "project": "esrally" + }, + { + "download_count": 232565, + "project": "django-crontab" + }, + { + "download_count": 232517, + "project": "standardjson" + }, + { + "download_count": 232389, + "project": "sphinxcontrib-svg2pdfconverter" + }, + { + "download_count": 232208, + "project": "jep" + }, + { + 
"download_count": 231947, + "project": "contractions" + }, + { + "download_count": 231914, + "project": "hashlib" + }, + { + "download_count": 231894, + "project": "hdrhistogram" + }, + { + "download_count": 231873, + "project": "pydoe" + }, + { + "download_count": 231818, + "project": "colorhash" + }, + { + "download_count": 231678, + "project": "venv-update" + }, + { + "download_count": 231678, + "project": "pytidylib" + }, + { + "download_count": 231634, + "project": "sas7bdat" + }, + { + "download_count": 231555, + "project": "pybrain" + }, + { + "download_count": 231491, + "project": "locust" + }, + { + "download_count": 231449, + "project": "easygui" + }, + { + "download_count": 231322, + "project": "pytest-qt" + }, + { + "download_count": 231297, + "project": "prance" + }, + { + "download_count": 231250, + "project": "nose-ignore-docstring" + }, + { + "download_count": 231113, + "project": "snakeviz" + }, + { + "download_count": 231027, + "project": "pygaljs" + }, + { + "download_count": 230954, + "project": "rainbow-saddle" + }, + { + "download_count": 230879, + "project": "wsgiref" + }, + { + "download_count": 230659, + "project": "django-config-models" + }, + { + "download_count": 230631, + "project": "django-partial-index" + }, + { + "download_count": 230614, + "project": "restrictedpython" + }, + { + "download_count": 230470, + "project": "consulate" + }, + { + "download_count": 230441, + "project": "django-s3-storage" + }, + { + "download_count": 230436, + "project": "jenkins" + }, + { + "download_count": 230427, + "project": "mtranslate" + }, + { + "download_count": 230393, + "project": "aiosmtplib" + }, + { + "download_count": 230248, + "project": "django-statsd-mozilla" + }, + { + "download_count": 229850, + "project": "ffmpeg" + }, + { + "download_count": 229620, + "project": "django-ranged-response" + }, + { + "download_count": 229579, + "project": "pytest-cover" + }, + { + "download_count": 229403, + "project": "flexget" + }, + { + 
"download_count": 229292, + "project": "django-cachalot" + }, + { + "download_count": 229142, + "project": "django-activity-stream" + }, + { + "download_count": 229046, + "project": "daemonocle" + }, + { + "download_count": 228702, + "project": "mimerender" + }, + { + "download_count": 228552, + "project": "mathematics-dataset" + }, + { + "download_count": 228521, + "project": "money" + }, + { + "download_count": 228488, + "project": "flake8-formatter-junit-xml" + }, + { + "download_count": 228281, + "project": "python-vagrant" + }, + { + "download_count": 228240, + "project": "parquet" + }, + { + "download_count": 228235, + "project": "asciimatics" + }, + { + "download_count": 228066, + "project": "singleton-decorator" + }, + { + "download_count": 228004, + "project": "petl" + }, + { + "download_count": 227997, + "project": "dogpile" + }, + { + "download_count": 227746, + "project": "beaver" + }, + { + "download_count": 227738, + "project": "dbt-postgres" + }, + { + "download_count": 227570, + "project": "patch-ng" + }, + { + "download_count": 227212, + "project": "pytest-replay" + }, + { + "download_count": 227202, + "project": "django-settings-export" + }, + { + "download_count": 227048, + "project": "traittypes" + }, + { + "download_count": 227010, + "project": "ipcalc" + }, + { + "download_count": 226931, + "project": "django-elasticache" + }, + { + "download_count": 226656, + "project": "pywsd" + }, + { + "download_count": 226426, + "project": "flask-kvsession" + }, + { + "download_count": 226328, + "project": "pytest-logging" + }, + { + "download_count": 226143, + "project": "java-random" + }, + { + "download_count": 226134, + "project": "flask-seasurf" + }, + { + "download_count": 226129, + "project": "posix-ipc" + }, + { + "download_count": 226063, + "project": "zconfig" + }, + { + "download_count": 225964, + "project": "flask-uuid" + }, + { + "download_count": 225932, + "project": "djangorestframework-oauth" + }, + { + "download_count": 225898, + 
"project": "nest-asyncio" + }, + { + "download_count": 225852, + "project": "flock" + }, + { + "download_count": 225551, + "project": "taskcluster-urls" + }, + { + "download_count": 225391, + "project": "cntk" + }, + { + "download_count": 224972, + "project": "lolcat" + }, + { + "download_count": 224933, + "project": "pyramid-beaker" + }, + { + "download_count": 224799, + "project": "pytest-allure-adaptor" + }, + { + "download_count": 224606, + "project": "openapi-core" + }, + { + "download_count": 224528, + "project": "jaraco-itertools" + }, + { + "download_count": 224426, + "project": "emcee" + }, + { + "download_count": 224246, + "project": "trio" + }, + { + "download_count": 224218, + "project": "plotly-express" + }, + { + "download_count": 224064, + "project": "hexdump" + }, + { + "download_count": 224043, + "project": "binpacking" + }, + { + "download_count": 224021, + "project": "babelfish" + }, + { + "download_count": 223853, + "project": "bincrafters-package-tools" + }, + { + "download_count": 223736, + "project": "edx-rest-api-client" + }, + { + "download_count": 223721, + "project": "rstcheck" + }, + { + "download_count": 223494, + "project": "pylogo" + }, + { + "download_count": 223248, + "project": "h2o-pysparkling-2-3" + }, + { + "download_count": 223214, + "project": "pybloom" + }, + { + "download_count": 222931, + "project": "python3-memcached" + }, + { + "download_count": 222858, + "project": "conda" + }, + { + "download_count": 222781, + "project": "confusable-homoglyphs" + }, + { + "download_count": 222739, + "project": "loky" + }, + { + "download_count": 222684, + "project": "super-csv" + }, + { + "download_count": 222634, + "project": "jprops" + }, + { + "download_count": 222587, + "project": "keyvaultlib" + }, + { + "download_count": 222554, + "project": "fbmessenger" + }, + { + "download_count": 222508, + "project": "wiremock" + }, + { + "download_count": 222412, + "project": "django-prettyjson" + }, + { + "download_count": 222176, + 
"project": "hug" + }, + { + "download_count": 222175, + "project": "mws" + }, + { + "download_count": 221970, + "project": "dash-daq" + }, + { + "download_count": 221895, + "project": "slycot" + }, + { + "download_count": 221892, + "project": "flask-uploads" + }, + { + "download_count": 221647, + "project": "alooma" + }, + { + "download_count": 221631, + "project": "muffnn" + }, + { + "download_count": 221604, + "project": "python-gettext" + }, + { + "download_count": 221598, + "project": "civisml-extensions" + }, + { + "download_count": 221440, + "project": "jaydebeapi3" + }, + { + "download_count": 221407, + "project": "scikit-plot" + }, + { + "download_count": 220993, + "project": "twitter-ads" + }, + { + "download_count": 220495, + "project": "pandoc" + }, + { + "download_count": 220301, + "project": "nplusone" + }, + { + "download_count": 220198, + "project": "sudachipy" + }, + { + "download_count": 220107, + "project": "django-render-block" + }, + { + "download_count": 219983, + "project": "pyrebase" + }, + { + "download_count": 219731, + "project": "fabric2" + }, + { + "download_count": 219711, + "project": "cloudfoundry-client" + }, + { + "download_count": 219544, + "project": "edx-completion" + }, + { + "download_count": 219404, + "project": "tabulator" + }, + { + "download_count": 219376, + "project": "django-cron" + }, + { + "download_count": 219261, + "project": "sk-video" + }, + { + "download_count": 219216, + "project": "zope-i18nmessageid" + }, + { + "download_count": 218973, + "project": "colorful" + }, + { + "download_count": 218307, + "project": "s4cmd" + }, + { + "download_count": 218171, + "project": "pychromecast" + }, + { + "download_count": 218073, + "project": "pyvisa" + }, + { + "download_count": 217824, + "project": "bok-choy" + }, + { + "download_count": 217614, + "project": "py-zipkin" + }, + { + "download_count": 217311, + "project": "ansible-modules-hashivault" + }, + { + "download_count": 217201, + "project": "datefinder" + }, + { + 
"download_count": 217188, + "project": "json-logic-qubit" + }, + { + "download_count": 216980, + "project": "sparse-dot-topn" + }, + { + "download_count": 216825, + "project": "flask-dance" + }, + { + "download_count": 216707, + "project": "aiml" + }, + { + "download_count": 216645, + "project": "certipy" + }, + { + "download_count": 216205, + "project": "area" + }, + { + "download_count": 216115, + "project": "sphinx-click" + }, + { + "download_count": 215902, + "project": "pylint-common" + }, + { + "download_count": 215763, + "project": "stompest" + }, + { + "download_count": 215715, + "project": "questionary" + }, + { + "download_count": 215011, + "project": "lupa" + }, + { + "download_count": 214880, + "project": "usbinfo" + }, + { + "download_count": 214864, + "project": "marshmallow-objects" + }, + { + "download_count": 214855, + "project": "django-encrypted-filefield" + }, + { + "download_count": 214793, + "project": "kerberos" + }, + { + "download_count": 214757, + "project": "isim" + }, + { + "download_count": 214507, + "project": "flask-moment" + }, + { + "download_count": 214468, + "project": "boto3-session-cache" + }, + { + "download_count": 214280, + "project": "yacs" + }, + { + "download_count": 214088, + "project": "bigquery-python" + }, + { + "download_count": 213952, + "project": "mobly" + }, + { + "download_count": 213688, + "project": "pyethash" + }, + { + "download_count": 213494, + "project": "django-colorful" + }, + { + "download_count": 213445, + "project": "ics" + }, + { + "download_count": 213185, + "project": "eyes-selenium" + }, + { + "download_count": 213156, + "project": "zdesk" + }, + { + "download_count": 213151, + "project": "requests-credssp" + }, + { + "download_count": 213071, + "project": "autosemver" + }, + { + "download_count": 212879, + "project": "ffx" + }, + { + "download_count": 212740, + "project": "wn" + }, + { + "download_count": 212739, + "project": "linear-tsv" + }, + { + "download_count": 212738, + "project": 
"webexteamssdk" + }, + { + "download_count": 212640, + "project": "circus" + }, + { + "download_count": 212529, + "project": "multiaddr" + }, + { + "download_count": 212516, + "project": "zipcode" + }, + { + "download_count": 212435, + "project": "dbt-bigquery" + }, + { + "download_count": 212295, + "project": "androguard" + }, + { + "download_count": 212275, + "project": "gapic-google-cloud-spanner-v1" + }, + { + "download_count": 212211, + "project": "gapic-google-cloud-spanner-admin-database-v1" + }, + { + "download_count": 212204, + "project": "gapic-google-cloud-spanner-admin-instance-v1" + }, + { + "download_count": 212074, + "project": "proto-google-cloud-spanner-v1" + }, + { + "download_count": 211988, + "project": "pip-review" + }, + { + "download_count": 211861, + "project": "passwordmeter" + }, + { + "download_count": 211783, + "project": "dbt-redshift" + }, + { + "download_count": 211766, + "project": "proto-google-cloud-spanner-admin-database-v1" + }, + { + "download_count": 211758, + "project": "proto-google-cloud-spanner-admin-instance-v1" + }, + { + "download_count": 211695, + "project": "python-prctl" + }, + { + "download_count": 211523, + "project": "dbt-snowflake" + }, + { + "download_count": 211483, + "project": "aws-kinesis-agg" + }, + { + "download_count": 211368, + "project": "pwntools" + }, + { + "download_count": 211309, + "project": "fs-s3fs" + }, + { + "download_count": 211286, + "project": "cloudshell-automation-api" + }, + { + "download_count": 211188, + "project": "postgres" + }, + { + "download_count": 211130, + "project": "pymeta3" + }, + { + "download_count": 210970, + "project": "robotframework-jsonlibrary" + }, + { + "download_count": 210929, + "project": "conllu" + }, + { + "download_count": 210633, + "project": "rpi-gpio" + }, + { + "download_count": 210596, + "project": "aresponses" + }, + { + "download_count": 210520, + "project": "textacy" + }, + { + "download_count": 210501, + "project": "djangocms-link" + }, + { + 
"download_count": 210080, + "project": "uproot" + }, + { + "download_count": 209987, + "project": "django-fsm-admin" + }, + { + "download_count": 209975, + "project": "anybadge" + }, + { + "download_count": 209424, + "project": "clearbit" + }, + { + "download_count": 209150, + "project": "fakenewsredis" + }, + { + "download_count": 209126, + "project": "sdnotify" + }, + { + "download_count": 209028, + "project": "python-baseconv" + }, + { + "download_count": 208950, + "project": "pytest-dotenv" + }, + { + "download_count": 208654, + "project": "pytest-logger" + }, + { + "download_count": 208524, + "project": "c7n" + }, + { + "download_count": 208338, + "project": "webium" + }, + { + "download_count": 208232, + "project": "eliot" + }, + { + "download_count": 208191, + "project": "anaconda" + }, + { + "download_count": 208167, + "project": "zope-configuration" + }, + { + "download_count": 208131, + "project": "talon" + }, + { + "download_count": 208092, + "project": "django-split-settings" + }, + { + "download_count": 207912, + "project": "elasticsearch6" + }, + { + "download_count": 207665, + "project": "cx-freeze" + }, + { + "download_count": 207551, + "project": "pyclipper" + }, + { + "download_count": 207474, + "project": "duo-web" + }, + { + "download_count": 207412, + "project": "django-easy-select2" + }, + { + "download_count": 207319, + "project": "pytricia" + }, + { + "download_count": 207241, + "project": "pyecharts" + }, + { + "download_count": 207068, + "project": "zendesk" + }, + { + "download_count": 206988, + "project": "zodbpickle" + }, + { + "download_count": 206923, + "project": "scout-apm" + }, + { + "download_count": 206832, + "project": "contexttimer" + }, + { + "download_count": 206379, + "project": "ngxtop" + }, + { + "download_count": 206215, + "project": "python-xmp-toolkit" + }, + { + "download_count": 205992, + "project": "redlock" + }, + { + "download_count": 205889, + "project": "smartypants" + }, + { + "download_count": 205562, + 
"project": "flake8-coding" + }, + { + "download_count": 205284, + "project": "zodb" + }, + { + "download_count": 205270, + "project": "django-reversion-compare" + }, + { + "download_count": 205192, + "project": "html-linter" + }, + { + "download_count": 205141, + "project": "client" + }, + { + "download_count": 205070, + "project": "backports-shutil-which" + }, + { + "download_count": 204937, + "project": "frida" + }, + { + "download_count": 204809, + "project": "dawg-python" + }, + { + "download_count": 204696, + "project": "django-transaction-hooks" + }, + { + "download_count": 204486, + "project": "aiotask-context" + }, + { + "download_count": 204328, + "project": "lazy-property" + }, + { + "download_count": 204268, + "project": "urlparse2" + }, + { + "download_count": 204251, + "project": "template-remover" + }, + { + "download_count": 204130, + "project": "pyttsx3" + }, + { + "download_count": 204053, + "project": "mesh-tensorflow" + }, + { + "download_count": 203892, + "project": "django-crum" + }, + { + "download_count": 203786, + "project": "asciitree" + }, + { + "download_count": 203548, + "project": "flake8-deprecated" + }, + { + "download_count": 203495, + "project": "weberror" + }, + { + "download_count": 203493, + "project": "shudder" + }, + { + "download_count": 203310, + "project": "dash-auth" + }, + { + "download_count": 203161, + "project": "rasa-nlu" + }, + { + "download_count": 203073, + "project": "conf-d" + }, + { + "download_count": 202765, + "project": "django-slack" + }, + { + "download_count": 202648, + "project": "pocketsphinx" + }, + { + "download_count": 202044, + "project": "pydivert" + }, + { + "download_count": 202007, + "project": "blosc" + }, + { + "download_count": 201958, + "project": "zipstream" + }, + { + "download_count": 201831, + "project": "parallel-sync" + }, + { + "download_count": 201651, + "project": "pycuda" + }, + { + "download_count": 201622, + "project": "ta-lib" + }, + { + "download_count": 201459, + "project": 
"jmxquery" + }, + { + "download_count": 201457, + "project": "tabula-py" + }, + { + "download_count": 201395, + "project": "pytest-flask-sqlalchemy" + }, + { + "download_count": 201101, + "project": "collectd" + }, + { + "download_count": 201096, + "project": "django-rest-multiple-models" + }, + { + "download_count": 201084, + "project": "pyobjc-framework-coretext" + }, + { + "download_count": 200633, + "project": "smart-getenv" + }, + { + "download_count": 200507, + "project": "pyramid-retry" + }, + { + "download_count": 200444, + "project": "codeclimate-test-reporter" + }, + { + "download_count": 200411, + "project": "publicsuffixlist" + }, + { + "download_count": 200394, + "project": "algoliasearch-django" + }, + { + "download_count": 200267, + "project": "pytest-salt" + }, + { + "download_count": 200235, + "project": "pytest-doctestplus" + }, + { + "download_count": 200035, + "project": "zope-lifecycleevent" + }, + { + "download_count": 199808, + "project": "python-zaqarclient" + }, + { + "download_count": 199774, + "project": "iniherit" + }, + { + "download_count": 199753, + "project": "pymorphy2-dicts" + }, + { + "download_count": 199695, + "project": "hanging-threads" + }, + { + "download_count": 199645, + "project": "flask-classful" + }, + { + "download_count": 199602, + "project": "pyrad" + }, + { + "download_count": 199568, + "project": "jsoncompare" + }, + { + "download_count": 199376, + "project": "python-graph-core" + }, + { + "download_count": 199234, + "project": "flask-mysqldb" + }, + { + "download_count": 199123, + "project": "pymorphy2" + }, + { + "download_count": 199116, + "project": "uncertainties" + }, + { + "download_count": 198904, + "project": "jdatetime" + }, + { + "download_count": 198768, + "project": "package" + }, + { + "download_count": 198699, + "project": "django-user-sessions" + }, + { + "download_count": 198662, + "project": "jproperties" + }, + { + "download_count": 198655, + "project": "optional-django" + }, + { + 
"download_count": 198573, + "project": "azure-mgmt-common" + }, + { + "download_count": 198386, + "project": "csscompressor" + }, + { + "download_count": 198360, + "project": "robotframework-lint" + }, + { + "download_count": 198297, + "project": "bintrees" + }, + { + "download_count": 198099, + "project": "esptool" + }, + { + "download_count": 198014, + "project": "sox" + }, + { + "download_count": 197847, + "project": "cotyledon" + }, + { + "download_count": 197484, + "project": "kafka-utils" + }, + { + "download_count": 197448, + "project": "pingparsing" + }, + { + "download_count": 197436, + "project": "semidbm" + }, + { + "download_count": 197405, + "project": "polyaxon-schemas" + }, + { + "download_count": 196830, + "project": "python-mozaggregator" + }, + { + "download_count": 196757, + "project": "pandas-summary" + }, + { + "download_count": 196390, + "project": "nbval" + }, + { + "download_count": 196154, + "project": "python3-xlib" + }, + { + "download_count": 195862, + "project": "pyobjc-framework-coredata" + }, + { + "download_count": 195697, + "project": "django-json-widget" + }, + { + "download_count": 194638, + "project": "trimesh" + }, + { + "download_count": 194604, + "project": "pyobjc-framework-addressbook" + }, + { + "download_count": 194552, + "project": "sq-blocks" + }, + { + "download_count": 194524, + "project": "simple-crypt" + }, + { + "download_count": 194469, + "project": "imgkit" + }, + { + "download_count": 194216, + "project": "pytype" + }, + { + "download_count": 193866, + "project": "aiohttp-session" + }, + { + "download_count": 193810, + "project": "lib" + }, + { + "download_count": 193713, + "project": "pyobjc-framework-screensaver" + }, + { + "download_count": 193702, + "project": "remote-pdb" + }, + { + "download_count": 193646, + "project": "pyobjc-framework-syncservices" + }, + { + "download_count": 193463, + "project": "pyobjc-framework-scriptingbridge" + }, + { + "download_count": 193206, + "project": "glmnet-py" + }, + { + 
"download_count": 193173, + "project": "edx-django-release-util" + }, + { + "download_count": 193118, + "project": "pyobjc-framework-corelocation" + }, + { + "download_count": 193105, + "project": "pyobjc-framework-inputmethodkit" + }, + { + "download_count": 193099, + "project": "lob" + }, + { + "download_count": 192939, + "project": "deb-pkg-tools" + }, + { + "download_count": 192929, + "project": "traits" + }, + { + "download_count": 192741, + "project": "django-revproxy" + }, + { + "download_count": 192721, + "project": "edx-submissions" + }, + { + "download_count": 192662, + "project": "simpy" + }, + { + "download_count": 192636, + "project": "ebooklib" + }, + { + "download_count": 192632, + "project": "importlab" + }, + { + "download_count": 192581, + "project": "tweet-preprocessor" + }, + { + "download_count": 192462, + "project": "eight" + }, + { + "download_count": 192349, + "project": "edx-when" + }, + { + "download_count": 192282, + "project": "telepot" + }, + { + "download_count": 192227, + "project": "django-recaptcha2" + }, + { + "download_count": 192174, + "project": "fastjsonschema" + }, + { + "download_count": 191971, + "project": "rebulk" + }, + { + "download_count": 191767, + "project": "zope-dottedname" + }, + { + "download_count": 191702, + "project": "cli-proton-python" + }, + { + "download_count": 191581, + "project": "schema-salad" + }, + { + "download_count": 191533, + "project": "progressbar33" + }, + { + "download_count": 191495, + "project": "libnacl" + }, + { + "download_count": 191407, + "project": "mattermostwrapper" + }, + { + "download_count": 191403, + "project": "mox" + }, + { + "download_count": 191379, + "project": "esprima" + }, + { + "download_count": 191100, + "project": "tf-nightly-gpu" + }, + { + "download_count": 191091, + "project": "python-firebase" + }, + { + "download_count": 190890, + "project": "flake8-bandit" + }, + { + "download_count": 190752, + "project": "python3-logstash" + }, + { + "download_count": 190743, + 
"project": "pyutilib" + }, + { + "download_count": 190491, + "project": "easypost" + }, + { + "download_count": 190474, + "project": "web-fragments" + }, + { + "download_count": 190430, + "project": "pytest-coverage" + }, + { + "download_count": 190275, + "project": "mailjet-rest" + }, + { + "download_count": 190267, + "project": "riemann-client" + }, + { + "download_count": 190168, + "project": "pytest-test-groups" + }, + { + "download_count": 189997, + "project": "dialogflow" + }, + { + "download_count": 189912, + "project": "tableschema" + }, + { + "download_count": 189480, + "project": "segtok" + }, + { + "download_count": 189475, + "project": "contentful" + }, + { + "download_count": 189290, + "project": "ropgadget" + }, + { + "download_count": 189289, + "project": "user-agent" + }, + { + "download_count": 189193, + "project": "django-profiler" + }, + { + "download_count": 189156, + "project": "devstack-tools" + }, + { + "download_count": 188865, + "project": "django-leaflet" + }, + { + "download_count": 188683, + "project": "datetime-truncate" + }, + { + "download_count": 188451, + "project": "pyjslint" + }, + { + "download_count": 188348, + "project": "dvc" + }, + { + "download_count": 188172, + "project": "zope-cachedescriptors" + }, + { + "download_count": 188122, + "project": "onetoken" + }, + { + "download_count": 188063, + "project": "ipfshttpclient" + }, + { + "download_count": 187976, + "project": "azure-functions" + }, + { + "download_count": 187875, + "project": "optimizely-sdk" + }, + { + "download_count": 187858, + "project": "cwltool" + }, + { + "download_count": 187574, + "project": "seqdiag" + }, + { + "download_count": 187547, + "project": "libthumbor" + }, + { + "download_count": 187440, + "project": "atlassian-python-api" + }, + { + "download_count": 187397, + "project": "pyobjc-framework-corewlan" + }, + { + "download_count": 187363, + "project": "azure-cli-natgateway" + }, + { + "download_count": 187117, + "project": 
"pyobjc-framework-imagecapturecore" + }, + { + "download_count": 186984, + "project": "django-hosts" + }, + { + "download_count": 186865, + "project": "pytest-reportportal" + }, + { + "download_count": 186711, + "project": "pyobjc-framework-avfoundation" + }, + { + "download_count": 186705, + "project": "pyobjc-framework-corebluetooth" + }, + { + "download_count": 186590, + "project": "glog" + }, + { + "download_count": 186547, + "project": "pyobjc-framework-mapkit" + }, + { + "download_count": 186536, + "project": "pyobjc-framework-avkit" + }, + { + "download_count": 186474, + "project": "pyobjc-framework-storekit" + }, + { + "download_count": 186445, + "project": "pypom" + }, + { + "download_count": 186363, + "project": "pyobjc-framework-multipeerconnectivity" + }, + { + "download_count": 186349, + "project": "pyobjc-framework-scenekit" + }, + { + "download_count": 186324, + "project": "richenum" + }, + { + "download_count": 186299, + "project": "pyobjc-framework-imserviceplugin" + }, + { + "download_count": 186260, + "project": "pyobjc-framework-gamecenter" + }, + { + "download_count": 186239, + "project": "boto3-type-annotations-with-docs" + }, + { + "download_count": 186229, + "project": "pyobjc-framework-spritekit" + }, + { + "download_count": 186187, + "project": "pyobjc-framework-notificationcenter" + }, + { + "download_count": 186170, + "project": "salttesting" + }, + { + "download_count": 186131, + "project": "you-get" + }, + { + "download_count": 186067, + "project": "pyobjc-framework-cryptotokenkit" + }, + { + "download_count": 186058, + "project": "pytest-catchlog" + }, + { + "download_count": 185930, + "project": "iptcinfo" + }, + { + "download_count": 185874, + "project": "hashin" + }, + { + "download_count": 185785, + "project": "colormath" + }, + { + "download_count": 185776, + "project": "nanotime" + }, + { + "download_count": 185712, + "project": "python-saharaclient" + }, + { + "download_count": 185687, + "project": "yanc" + }, + { + 
"download_count": 185684, + "project": "methodtools" + }, + { + "download_count": 185575, + "project": "pytest-openfiles" + }, + { + "download_count": 185568, + "project": "zope-security" + }, + { + "download_count": 185489, + "project": "django-crequest" + }, + { + "download_count": 185383, + "project": "pymemoize" + }, + { + "download_count": 185321, + "project": "django-fsm-log" + }, + { + "download_count": 185307, + "project": "django-warrant" + }, + { + "download_count": 185226, + "project": "acora" + }, + { + "download_count": 184984, + "project": "python-hpilo" + }, + { + "download_count": 184866, + "project": "zope-exceptions" + }, + { + "download_count": 184842, + "project": "ase" + }, + { + "download_count": 184834, + "project": "django-debug-toolbar-request-history" + }, + { + "download_count": 184816, + "project": "clipboard" + }, + { + "download_count": 184780, + "project": "manifest-tool" + }, + { + "download_count": 184769, + "project": "pdftotext" + }, + { + "download_count": 184767, + "project": "events" + }, + { + "download_count": 184609, + "project": "zope-contenttype" + }, + { + "download_count": 184473, + "project": "django-discover-runner" + }, + { + "download_count": 184469, + "project": "libtiff" + }, + { + "download_count": 184406, + "project": "sqlacodegen" + }, + { + "download_count": 184172, + "project": "pyomo" + }, + { + "download_count": 184107, + "project": "django-admin-sortable" + }, + { + "download_count": 183722, + "project": "oic" + }, + { + "download_count": 183626, + "project": "django-user-tasks" + }, + { + "download_count": 183425, + "project": "edx-lint" + }, + { + "download_count": 183383, + "project": "netfilterqueue" + }, + { + "download_count": 183355, + "project": "zope-location" + }, + { + "download_count": 183073, + "project": "pyobjc-framework-qtkit" + }, + { + "download_count": 183058, + "project": "apispec-webframeworks" + }, + { + "download_count": 183054, + "project": "django-dbbackup" + }, + { + 
"download_count": 182995, + "project": "interpret-core" + }, + { + "download_count": 182971, + "project": "docker-compose-wait" + }, + { + "download_count": 182913, + "project": "socketpool" + }, + { + "download_count": 182775, + "project": "qgrid" + }, + { + "download_count": 182678, + "project": "localstack-ext" + }, + { + "download_count": 182643, + "project": "munkres" + }, + { + "download_count": 182633, + "project": "django-admin-list-filter-dropdown" + }, + { + "download_count": 182500, + "project": "edx-ccx-keys" + }, + { + "download_count": 182205, + "project": "jsonrpclib" + }, + { + "download_count": 182178, + "project": "pyinstrument-cext" + }, + { + "download_count": 182161, + "project": "wsgiproxy2" + }, + { + "download_count": 182080, + "project": "msgfy" + }, + { + "download_count": 182061, + "project": "localstack" + }, + { + "download_count": 182033, + "project": "mpl-finance" + }, + { + "download_count": 182028, + "project": "sinon" + }, + { + "download_count": 181902, + "project": "pyobjc-framework-photos" + }, + { + "download_count": 181883, + "project": "pyobjc-framework-contacts" + }, + { + "download_count": 181832, + "project": "pyobjc-framework-safariservices" + }, + { + "download_count": 181822, + "project": "nagiosplugin" + }, + { + "download_count": 181811, + "project": "hbmqtt" + }, + { + "download_count": 181809, + "project": "pyobjc-framework-photosui" + }, + { + "download_count": 181782, + "project": "rfc6266" + }, + { + "download_count": 181770, + "project": "wtforms-alchemy" + }, + { + "download_count": 181753, + "project": "pyobjc-framework-modelio" + }, + { + "download_count": 181752, + "project": "gocardless-pro" + }, + { + "download_count": 181742, + "project": "pyobjc-framework-applicationservices" + }, + { + "download_count": 181658, + "project": "datadog-checks-base" + }, + { + "download_count": 181619, + "project": "pyobjc-framework-contactsui" + }, + { + "download_count": 181492, + "project": "zope-publisher" + }, + { + 
"download_count": 181460, + "project": "pyobjc-framework-applescriptkit" + }, + { + "download_count": 181449, + "project": "pyobjc-framework-networkextension" + }, + { + "download_count": 181408, + "project": "zope-i18n" + }, + { + "download_count": 181315, + "project": "recordio" + }, + { + "download_count": 181306, + "project": "pyobjc-framework-preferencepanes" + }, + { + "download_count": 181204, + "project": "pyobjc-framework-installerplugins" + }, + { + "download_count": 181198, + "project": "pyobjc-framework-automator" + }, + { + "download_count": 181194, + "project": "python-interface" + }, + { + "download_count": 181178, + "project": "dogslow" + }, + { + "download_count": 181007, + "project": "s3pypi" + }, + { + "download_count": 180930, + "project": "arpeggio" + }, + { + "download_count": 180918, + "project": "pyobjc-framework-searchkit" + }, + { + "download_count": 180910, + "project": "pyobjc-framework-latentsemanticmapping" + }, + { + "download_count": 180898, + "project": "imgurpython" + }, + { + "download_count": 180787, + "project": "huey" + }, + { + "download_count": 180646, + "project": "pyobjc-framework-applescriptobjc" + }, + { + "download_count": 180541, + "project": "pyobjc-framework-instantmessage" + }, + { + "download_count": 180484, + "project": "pyclamd" + }, + { + "download_count": 180478, + "project": "pyobjc-framework-accounts" + }, + { + "download_count": 180443, + "project": "pyobjc-framework-servicemanagement" + }, + { + "download_count": 180359, + "project": "sortedcollections" + }, + { + "download_count": 180352, + "project": "pyobjc-framework-dictionaryservices" + }, + { + "download_count": 180326, + "project": "pyobjc-framework-pubsub" + }, + { + "download_count": 180234, + "project": "pyobjc-framework-collaboration" + }, + { + "download_count": 180184, + "project": "cqlsh" + }, + { + "download_count": 180108, + "project": "hacs-frontend" + }, + { + "download_count": 179819, + "project": "pyobjc-framework-social" + }, + { + 
"download_count": 179803, + "project": "pybars3" + }, + { + "download_count": 179768, + "project": "pyobjc-framework-eventkit" + }, + { + "download_count": 179757, + "project": "pyobjc-framework-opendirectory" + }, + { + "download_count": 179716, + "project": "chatterbot" + }, + { + "download_count": 179610, + "project": "neovim" + }, + { + "download_count": 179540, + "project": "json-logging" + }, + { + "download_count": 179401, + "project": "pytest-splinter" + }, + { + "download_count": 179317, + "project": "fig" + }, + { + "download_count": 179255, + "project": "pyte" + }, + { + "download_count": 179193, + "project": "bagit" + }, + { + "download_count": 179031, + "project": "aiohttp-swagger" + }, + { + "download_count": 178930, + "project": "django-cronman" + }, + { + "download_count": 178836, + "project": "robotframework-pageobjectlibrary" + }, + { + "download_count": 178805, + "project": "django-tenant-schemas" + }, + { + "download_count": 178606, + "project": "pypcd" + }, + { + "download_count": 178579, + "project": "s3contents" + }, + { + "download_count": 178532, + "project": "pytube" + }, + { + "download_count": 178420, + "project": "srvlookup" + }, + { + "download_count": 178249, + "project": "django-cache-url" + }, + { + "download_count": 178237, + "project": "pytest-sanic" + }, + { + "download_count": 178164, + "project": "pybase62" + }, + { + "download_count": 178040, + "project": "modulegraph" + }, + { + "download_count": 177513, + "project": "flufl-lock" + }, + { + "download_count": 177343, + "project": "pyobjc-framework-intents" + }, + { + "download_count": 177128, + "project": "playsound" + }, + { + "download_count": 177060, + "project": "django-sql-explorer" + }, + { + "download_count": 177040, + "project": "pymavlink" + }, + { + "download_count": 176939, + "project": "snowflake" + }, + { + "download_count": 176684, + "project": "drfdocs" + }, + { + "download_count": 176663, + "project": "django-sendfile" + }, + { + "download_count": 176504, + 
"project": "zope-testing" + }, + { + "download_count": 176439, + "project": "autocorrect" + }, + { + "download_count": 176429, + "project": "django-filters" + }, + { + "download_count": 176316, + "project": "delighted" + }, + { + "download_count": 176189, + "project": "pick" + }, + { + "download_count": 176166, + "project": "restricted-pkg" + }, + { + "download_count": 176069, + "project": "tlslite-ng" + }, + { + "download_count": 175910, + "project": "click-datetime" + }, + { + "download_count": 175901, + "project": "mapbox" + }, + { + "download_count": 175833, + "project": "zope-traversing" + }, + { + "download_count": 175827, + "project": "yagmail" + }, + { + "download_count": 175386, + "project": "os-diskconfig-python-novaclient-ext" + }, + { + "download_count": 175252, + "project": "env-utils" + }, + { + "download_count": 175153, + "project": "pyramid-chameleon" + }, + { + "download_count": 175039, + "project": "pysphere" + }, + { + "download_count": 174995, + "project": "pyobjc-framework-calendarstore" + }, + { + "download_count": 174675, + "project": "tfrecord-lite" + }, + { + "download_count": 174598, + "project": "zope-container" + }, + { + "download_count": 174537, + "project": "pyobjc-framework-iosurface" + }, + { + "download_count": 174516, + "project": "pyobjc-framework-netfs" + }, + { + "download_count": 174283, + "project": "zope-browser" + }, + { + "download_count": 174221, + "project": "cymysql" + }, + { + "download_count": 174210, + "project": "scrapy-fake-useragent" + }, + { + "download_count": 174182, + "project": "pysnooper" + }, + { + "download_count": 174143, + "project": "allennlp" + }, + { + "download_count": 174141, + "project": "itchat" + }, + { + "download_count": 174002, + "project": "pytest-arraydiff" + }, + { + "download_count": 174001, + "project": "multimethods" + }, + { + "download_count": 173985, + "project": "concurrencytest" + }, + { + "download_count": 173985, + "project": "pyxattr" + }, + { + "download_count": 173977, + 
"project": "pyobjc-framework-medialibrary" + }, + { + "download_count": 173974, + "project": "python-vlc" + }, + { + "download_count": 173922, + "project": "django-summernote" + }, + { + "download_count": 173897, + "project": "msal-extensions" + }, + { + "download_count": 173878, + "project": "pyobjc-framework-gamecontroller" + }, + { + "download_count": 173812, + "project": "pyobjc-framework-findersync" + }, + { + "download_count": 173771, + "project": "pyobjc-framework-cloudkit" + }, + { + "download_count": 173753, + "project": "pyobjc-framework-localauthentication" + }, + { + "download_count": 173686, + "project": "pyobjc-framework-mediaaccessibility" + }, + { + "download_count": 173647, + "project": "vega" + }, + { + "download_count": 173582, + "project": "textstat" + }, + { + "download_count": 173469, + "project": "neomodel" + }, + { + "download_count": 173417, + "project": "pyobjc" + }, + { + "download_count": 173414, + "project": "check-puppet-agent" + }, + { + "download_count": 173066, + "project": "os-networksv2-python-novaclient-ext" + }, + { + "download_count": 173034, + "project": "vcd-cli" + }, + { + "download_count": 172953, + "project": "numdifftools" + }, + { + "download_count": 172704, + "project": "tensorflow-graphics" + }, + { + "download_count": 172697, + "project": "pysqslistener" + }, + { + "download_count": 172681, + "project": "kazurator" + }, + { + "download_count": 172661, + "project": "xstatic-roboto-fontface" + }, + { + "download_count": 172595, + "project": "asyncio-nats-streaming" + }, + { + "download_count": 172285, + "project": "slugify" + }, + { + "download_count": 172276, + "project": "jupyter-notebook-gist" + }, + { + "download_count": 172213, + "project": "awsretry" + }, + { + "download_count": 172075, + "project": "flup" + }, + { + "download_count": 172011, + "project": "tornado-aws" + }, + { + "download_count": 171812, + "project": "rackspace-novaclient" + }, + { + "download_count": 171679, + "project": "django-q" + }, + { + 
"download_count": 171593, + "project": "rax-default-network-flags-python-novaclient-ext" + }, + { + "download_count": 171548, + "project": "object-pool" + }, + { + "download_count": 171504, + "project": "xstatic-font-awesome" + }, + { + "download_count": 171492, + "project": "rackspace-auth-openstack" + }, + { + "download_count": 171339, + "project": "qdarkstyle" + }, + { + "download_count": 171275, + "project": "tox-monorepo" + } + ] +} diff --git a/Tools/peg_generator/peg_extension/__init__.py b/Tools/peg_generator/peg_extension/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/Lib/test/test_peg_generator/ast_dump.py b/Tools/peg_generator/pegen/ast_dump.py similarity index 74% rename from Lib/test/test_peg_generator/ast_dump.py rename to Tools/peg_generator/pegen/ast_dump.py index 22d2dde775597..93dfbfd963ca6 100644 --- a/Lib/test/test_peg_generator/ast_dump.py +++ b/Tools/peg_generator/pegen/ast_dump.py @@ -6,16 +6,17 @@ TODO: Remove the above-described hack. 
""" + def ast_dump(node, annotate_fields=True, include_attributes=False, *, indent=None): def _format(node, level=0): if indent is not None: level += 1 - prefix = '\n' + indent * level - sep = ',\n' + indent * level + prefix = "\n" + indent * level + sep = ",\n" + indent * level else: - prefix = '' - sep = ', ' - if any(cls.__name__ == 'AST' for cls in node.__class__.__mro__): + prefix = "" + sep = ", " + if any(cls.__name__ == "AST" for cls in node.__class__.__mro__): cls = type(node) args = [] allsimple = True @@ -32,7 +33,7 @@ def _format(node, level=0): value, simple = _format(value, level) allsimple = allsimple and simple if keywords: - args.append('%s=%s' % (name, value)) + args.append("%s=%s" % (name, value)) else: args.append(value) if include_attributes and node._attributes: @@ -45,18 +46,18 @@ def _format(node, level=0): continue value, simple = _format(value, level) allsimple = allsimple and simple - args.append('%s=%s' % (name, value)) + args.append("%s=%s" % (name, value)) if allsimple and len(args) <= 3: - return '%s(%s)' % (node.__class__.__name__, ', '.join(args)), not args - return '%s(%s%s)' % (node.__class__.__name__, prefix, sep.join(args)), False + return "%s(%s)" % (node.__class__.__name__, ", ".join(args)), not args + return "%s(%s%s)" % (node.__class__.__name__, prefix, sep.join(args)), False elif isinstance(node, list): if not node: - return '[]', True - return '[%s%s]' % (prefix, sep.join(_format(x, level)[0] for x in node)), False + return "[]", True + return "[%s%s]" % (prefix, sep.join(_format(x, level)[0] for x in node)), False return repr(node), True - if all(cls.__name__ != 'AST' for cls in node.__class__.__mro__): - raise TypeError('expected AST, got %r' % node.__class__.__name__) + if all(cls.__name__ != "AST" for cls in node.__class__.__mro__): + raise TypeError("expected AST, got %r" % node.__class__.__name__) if indent is not None and not isinstance(indent, str): - indent = ' ' * indent + indent = " " * indent return 
_format(node)[0] diff --git a/Tools/peg_generator/pegen/build.py b/Tools/peg_generator/pegen/build.py index 907feeaf122de..8f9348ddf24ac 100644 --- a/Tools/peg_generator/pegen/build.py +++ b/Tools/peg_generator/pegen/build.py @@ -3,6 +3,7 @@ import tokenize import sys import sysconfig +import tempfile import itertools from typing import Optional, Tuple, List, IO, Iterator, Set, Dict @@ -162,9 +163,13 @@ def build_c_generator( gen.generate(grammar_file) if compile_extension: - compile_c_extension( - output_file, verbose=verbose_c_extension, keep_asserts=keep_asserts_in_extension - ) + with tempfile.TemporaryDirectory() as build_dir: + compile_c_extension( + output_file, + build_dir=build_dir, + verbose=verbose_c_extension, + keep_asserts=keep_asserts_in_extension, + ) return gen diff --git a/Tools/peg_generator/scripts/benchmark.py b/Tools/peg_generator/scripts/benchmark.py index 6b4287cd8cecc..0e9d5bd104813 100644 --- a/Tools/peg_generator/scripts/benchmark.py +++ b/Tools/peg_generator/scripts/benchmark.py @@ -105,10 +105,7 @@ def run_benchmark_stdlib(subcommand, parser): "../../Lib", "../../Grammar/python.gram", verbose=False, - excluded_files=[ - "*/bad*", - "*/lib2to3/tests/data/*", - ], + excluded_files=["*/bad*", "*/lib2to3/tests/data/*",], skip_actions=False, tree_arg=0, short=True, diff --git a/Tools/peg_generator/scripts/show_parse.py b/Tools/peg_generator/scripts/show_parse.py index f5f92fdaf755d..1a0410e1bac8f 100755 --- a/Tools/peg_generator/scripts/show_parse.py +++ b/Tools/peg_generator/scripts/show_parse.py @@ -32,6 +32,9 @@ from typing import List +sys.path.insert(0, os.getcwd()) +from pegen.ast_dump import ast_dump + parser = argparse.ArgumentParser() parser.add_argument( "-d", "--diff", action="store_true", help="show diff between grammar and ast (requires -g)" @@ -49,7 +52,7 @@ def format_tree(tree: ast.AST, verbose: bool = False) -> str: with tempfile.NamedTemporaryFile("w+") as tf: - tf.write(ast.dump(tree, include_attributes=verbose)) + 
tf.write(ast_dump(tree, include_attributes=verbose)) tf.write("\n") tf.flush() cmd = f"black -q {tf.name}" diff --git a/Tools/peg_generator/scripts/test_parse_directory.py b/Tools/peg_generator/scripts/test_parse_directory.py index 6511a2d932f74..a6078ef564061 100755 --- a/Tools/peg_generator/scripts/test_parse_directory.py +++ b/Tools/peg_generator/scripts/test_parse_directory.py @@ -14,6 +14,7 @@ sys.path.insert(0, os.getcwd()) from pegen.build import build_c_parser_and_generator +from pegen.ast_dump import ast_dump from pegen.testutil import print_memstats from scripts import show_parse @@ -85,8 +86,8 @@ def compare_trees( with open(file) as f: expected_tree = ast.parse(f.read()) - expected_text = ast.dump(expected_tree, include_attributes=include_attributes) - actual_text = ast.dump(actual_tree, include_attributes=include_attributes) + expected_text = ast_dump(expected_tree, include_attributes=include_attributes) + actual_text = ast_dump(actual_tree, include_attributes=include_attributes) if actual_text == expected_text: if verbose: print("Tree for {file}:") @@ -164,7 +165,7 @@ def parse_directory( if parser == "pegen": try: from peg_extension import parse # type: ignore - except: + except Exception as e: print( "An existing parser was not found. 
Please run `make` or specify a grammar file with the `-g` flag.", file=sys.stderr, diff --git a/Tools/peg_generator/scripts/test_pypi_packages.py b/Tools/peg_generator/scripts/test_pypi_packages.py index 90490330fef1d..7586b1a21fa6d 100755 --- a/Tools/peg_generator/scripts/test_pypi_packages.py +++ b/Tools/peg_generator/scripts/test_pypi_packages.py @@ -6,14 +6,18 @@ import tarfile import zipfile import shutil +import pathlib import sys from typing import Generator, Any sys.path.insert(0, ".") + from pegen import build from scripts import test_parse_directory +HERE = pathlib.Path(__file__).resolve().parent + argparser = argparse.ArgumentParser( prog="test_pypi_packages", description="Helper program to test parsing PyPI packages", ) @@ -53,7 +57,8 @@ def find_dirname(package_name: str) -> str: def run_tests(dirname: str, tree: int, extension: Any) -> int: return test_parse_directory.parse_directory( dirname, - "data/python.gram", + HERE / ".." / ".." / ".." / "Grammar" / "python.gram", + HERE / ".." / ".." / ".." / "Grammar" / "Tokens", verbose=False, excluded_files=[ "*/failset/*", @@ -68,6 +73,8 @@ def run_tests(dirname: str, tree: int, extension: Any) -> int: tree_arg=tree, short=True, extension=extension, + mode=1, + parser="pegen", ) @@ -75,9 +82,13 @@ def main() -> None: args = argparser.parse_args() tree = args.tree - extension = build.build_parser_and_generator( - "data/python.gram", "peg_parser/parse.c", compile_extension=True + extension = build.build_c_parser_and_generator( + HERE / ".." / ".." / ".." / "Grammar" / "python.gram", + HERE / ".." / ".." / ".." / "Grammar" / "Tokens", + "peg_extension/parse.c", + compile_extension=True, ) + for package in get_packages(): print(f"Extracting files from {package}... 
", end="") try: @@ -91,7 +102,6 @@ def main() -> None: dirname = find_dirname(package) status = run_tests(dirname, tree, extension) if status == 0: - print("Done") shutil.rmtree(dirname) else: print(f"Failed to parse {dirname}") From webhook-mailer at python.org Sat May 2 00:23:47 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Sat, 02 May 2020 04:23:47 -0000 Subject: [Python-checkins] bpo-40334: regenerate metaparser as part of regen-all (GH-19854) Message-ID: https://github.com/python/cpython/commit/d2baff4301387e232495491f7291903cc1217d21 commit: d2baff4301387e232495491f7291903cc1217d21 branch: master author: Pablo Galindo committer: GitHub date: 2020-05-02T05:23:39+01:00 summary: bpo-40334: regenerate metaparser as part of regen-all (GH-19854) files: M Makefile.pre.in M Tools/peg_generator/pegen/grammar_parser.py diff --git a/Makefile.pre.in b/Makefile.pre.in index fa7fb1fcc167f..2b0b1b205a940 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -745,8 +745,9 @@ regen-importlib: Programs/_freeze_importlib ############################################################################ # Regenerate all generated files -regen-all: regen-opcode regen-opcode-targets regen-typeslots regen-grammar regen-pegen \ - regen-token regen-keyword regen-symbol regen-ast regen-importlib clinic +regen-all: regen-opcode regen-opcode-targets regen-typeslots regen-grammar \ + regen-token regen-keyword regen-symbol regen-ast regen-importlib clinic \ + regen-pegen-metaparser regen-pegen ############################################################################ # Special rules for object files @@ -820,6 +821,14 @@ regen-grammar: regen-token $(UPDATE_FILE) $(srcdir)/Include/graminit.h $(srcdir)/Include/graminit.h.new $(UPDATE_FILE) $(srcdir)/Python/graminit.c $(srcdir)/Python/graminit.c.new +.PHONY: regen-pegen-metaparser +regen-pegen-metaparser: + PYTHONPATH=$(srcdir)/Tools/peg_generator $(PYTHON_FOR_REGEN) -m pegen -q python \ + 
$(srcdir)/Tools/peg_generator/pegen/metagrammar.gram \ + -o $(srcdir)/Tools/peg_generator/pegen/grammar_parser.py.new + $(UPDATE_FILE) $(srcdir)/Tools/peg_generator/pegen/grammar_parser.py \ + $(srcdir)/Tools/peg_generator/pegen/grammar_parser.py.new + .PHONY: regen-pegen regen-pegen: @$(MKDIR_P) $(srcdir)/Parser/pegen diff --git a/Tools/peg_generator/pegen/grammar_parser.py b/Tools/peg_generator/pegen/grammar_parser.py index 0e206ee9cd5e4..c784cfdf3b266 100644 --- a/Tools/peg_generator/pegen/grammar_parser.py +++ b/Tools/peg_generator/pegen/grammar_parser.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3.8 -# @generated by pegen from pegen/metagrammar.gram +# @generated by pegen from ./Tools/peg_generator/pegen/metagrammar.gram import ast import sys From webhook-mailer at python.org Sat May 2 02:38:09 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Sat, 02 May 2020 06:38:09 -0000 Subject: [Python-checkins] bpo-39435: Make the first argument of pickle.loads() positional-only. (GH-19846) Message-ID: https://github.com/python/cpython/commit/531d1e541284bfd7944f8c66a5e8c3c3234afaff commit: 531d1e541284bfd7944f8c66a5e8c3c3234afaff branch: master author: Serhiy Storchaka committer: GitHub date: 2020-05-02T09:38:01+03:00 summary: bpo-39435: Make the first argument of pickle.loads() positional-only. (GH-19846) It was positional-only de facto: documentation and two implementations used three different name. files: A Misc/NEWS.d/next/Library/2020-05-01-23-24-25.bpo-39435.mgb6ib.rst M Doc/library/pickle.rst M Lib/pickle.py M Modules/_pickle.c M Modules/clinic/_pickle.c.h diff --git a/Doc/library/pickle.rst b/Doc/library/pickle.rst index d92e947a76403..b7c3452771948 100644 --- a/Doc/library/pickle.rst +++ b/Doc/library/pickle.rst @@ -252,7 +252,7 @@ process more convenient: .. versionchanged:: 3.8 The *buffers* argument was added. -.. function:: loads(data, \*, fix_imports=True, encoding="ASCII", errors="strict", buffers=None) +.. 
function:: loads(data, /, \*, fix_imports=True, encoding="ASCII", errors="strict", buffers=None) Return the reconstituted object hierarchy of the pickled representation *data* of an object. *data* must be a :term:`bytes-like object`. diff --git a/Lib/pickle.py b/Lib/pickle.py index 1fc8b0d26c6c4..cbac5f168b45e 100644 --- a/Lib/pickle.py +++ b/Lib/pickle.py @@ -13,7 +13,7 @@ dump(object, file) dumps(object) -> string load(file) -> object - loads(string) -> object + loads(bytes) -> object Misc variables: @@ -1761,7 +1761,7 @@ def _load(file, *, fix_imports=True, encoding="ASCII", errors="strict", return _Unpickler(file, fix_imports=fix_imports, buffers=buffers, encoding=encoding, errors=errors).load() -def _loads(s, *, fix_imports=True, encoding="ASCII", errors="strict", +def _loads(s, /, *, fix_imports=True, encoding="ASCII", errors="strict", buffers=None): if isinstance(s, str): raise TypeError("Can't load pickle from unicode string") diff --git a/Misc/NEWS.d/next/Library/2020-05-01-23-24-25.bpo-39435.mgb6ib.rst b/Misc/NEWS.d/next/Library/2020-05-01-23-24-25.bpo-39435.mgb6ib.rst new file mode 100644 index 0000000000000..2a516a53ed9e2 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-01-23-24-25.bpo-39435.mgb6ib.rst @@ -0,0 +1 @@ +The first argument of :func:`pickle.loads` is now positional-only. 
diff --git a/Modules/_pickle.c b/Modules/_pickle.c index d07fa53a1235e..5539e64025a39 100644 --- a/Modules/_pickle.c +++ b/Modules/_pickle.c @@ -7873,6 +7873,7 @@ _pickle_load_impl(PyObject *module, PyObject *file, int fix_imports, _pickle.loads data: object + / * fix_imports: bool = True encoding: str = 'ASCII' @@ -7899,7 +7900,7 @@ static PyObject * _pickle_loads_impl(PyObject *module, PyObject *data, int fix_imports, const char *encoding, const char *errors, PyObject *buffers) -/*[clinic end generated code: output=82ac1e6b588e6d02 input=9c2ab6a0960185ea]*/ +/*[clinic end generated code: output=82ac1e6b588e6d02 input=b3615540d0535087]*/ { PyObject *result; UnpicklerObject *unpickler = _Unpickler_New(); diff --git a/Modules/clinic/_pickle.c.h b/Modules/clinic/_pickle.c.h index 0457a433e79fb..136524b6a7134 100644 --- a/Modules/clinic/_pickle.c.h +++ b/Modules/clinic/_pickle.c.h @@ -735,7 +735,7 @@ _pickle_load(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject } PyDoc_STRVAR(_pickle_loads__doc__, -"loads($module, /, data, *, fix_imports=True, encoding=\'ASCII\',\n" +"loads($module, data, /, *, fix_imports=True, encoding=\'ASCII\',\n" " errors=\'strict\', buffers=())\n" "--\n" "\n" @@ -766,7 +766,7 @@ static PyObject * _pickle_loads(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; - static const char * const _keywords[] = {"data", "fix_imports", "encoding", "errors", "buffers", NULL}; + static const char * const _keywords[] = {"", "fix_imports", "encoding", "errors", "buffers", NULL}; static _PyArg_Parser _parser = {NULL, _keywords, "loads", 0}; PyObject *argsbuf[5]; Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 1; @@ -836,4 +836,4 @@ _pickle_loads(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObjec exit: return return_value; } -/*[clinic end generated code: output=e2506823be1960c5 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=324aad69644beda2 input=a9049054013a1b77]*/ From webhook-mailer at python.org Sat May 2 04:08:05 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Sat, 02 May 2020 08:08:05 -0000 Subject: [Python-checkins] [3.8] bpo-40398: Fix typing.get_args() for special generic aliases. (GH-19720) (GH-19857) Message-ID: https://github.com/python/cpython/commit/a629d4c63c55ba36be36ff105dfc103b710c9a2d commit: a629d4c63c55ba36be36ff105dfc103b710c9a2d branch: 3.8 author: Serhiy Storchaka committer: GitHub date: 2020-05-02T11:08:00+03:00 summary: [3.8] bpo-40398: Fix typing.get_args() for special generic aliases. (GH-19720) (GH-19857) (cherry picked from commit 6292be7adf247589bbf03524f8883cb4cb61f3e9) files: A Misc/NEWS.d/next/Library/2020-04-26-22-25-36.bpo-40398.OdXnR3.rst M Lib/test/test_typing.py M Lib/typing.py diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py index bdd7acd85914c..83bfef14dfb96 100644 --- a/Lib/test/test_typing.py +++ b/Lib/test/test_typing.py @@ -2913,6 +2913,9 @@ class C(Generic[T]): pass self.assertIs(get_origin(Generic), Generic) self.assertIs(get_origin(Generic[T]), Generic) self.assertIs(get_origin(List[Tuple[T, T]][int]), list) + self.assertIs(get_origin(List), list) + self.assertIs(get_origin(Tuple), tuple) + self.assertIs(get_origin(Callable), collections.abc.Callable) def test_get_args(self): T = TypeVar('T') @@ -2928,11 +2931,15 @@ class C(Generic[T]): pass (int, Tuple[str, int])) self.assertEqual(get_args(typing.Dict[int, Tuple[T, T]][Optional[int]]), (int, Tuple[Optional[int], Optional[int]])) - self.assertEqual(get_args(Callable[[], T][int]), ([], int,)) + self.assertEqual(get_args(Callable[[], T][int]), ([], int)) + 
self.assertEqual(get_args(Callable[..., int]), (..., int)) self.assertEqual(get_args(Union[int, Callable[[Tuple[T, ...]], str]]), (int, Callable[[Tuple[T, ...]], str])) self.assertEqual(get_args(Tuple[int, ...]), (int, ...)) self.assertEqual(get_args(Tuple[()]), ((),)) + self.assertEqual(get_args(List), ()) + self.assertEqual(get_args(Tuple), ()) + self.assertEqual(get_args(Callable), ()) class CollectionsAbcTests(BaseTestCase): diff --git a/Lib/typing.py b/Lib/typing.py index f4fb08f4500de..589eea98ad31c 100644 --- a/Lib/typing.py +++ b/Lib/typing.py @@ -1300,7 +1300,7 @@ def get_args(tp): get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) get_args(Callable[[], T][int]) == ([], int) """ - if isinstance(tp, _GenericAlias): + if isinstance(tp, _GenericAlias) and not tp._special: res = tp.__args__ if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis: res = (list(res[:-1]), res[-1]) diff --git a/Misc/NEWS.d/next/Library/2020-04-26-22-25-36.bpo-40398.OdXnR3.rst b/Misc/NEWS.d/next/Library/2020-04-26-22-25-36.bpo-40398.OdXnR3.rst new file mode 100644 index 0000000000000..a56da0c109592 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-04-26-22-25-36.bpo-40398.OdXnR3.rst @@ -0,0 +1,2 @@ +:func:`typing.get_args` now always returns an empty tuple for special +generic aliases. 
From webhook-mailer at python.org Sat May 2 11:15:35 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Sat, 02 May 2020 15:15:35 -0000 Subject: [Python-checkins] Call $(MKDIR_P) before regenerating the PEG meta-parser (GH-19861) Message-ID: https://github.com/python/cpython/commit/b7226eb0b76dd867917c6f66fd68fe6f8da01d9c commit: b7226eb0b76dd867917c6f66fd68fe6f8da01d9c branch: master author: Pablo Galindo committer: GitHub date: 2020-05-02T16:15:27+01:00 summary: Call $(MKDIR_P) before regenerating the PEG meta-parser (GH-19861) files: M Makefile.pre.in diff --git a/Makefile.pre.in b/Makefile.pre.in index 2b0b1b205a940..3cb8b84157f0e 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -823,6 +823,7 @@ regen-grammar: regen-token .PHONY: regen-pegen-metaparser regen-pegen-metaparser: + @$(MKDIR_P) $(srcdir)/Tools/peg_generator/pegen PYTHONPATH=$(srcdir)/Tools/peg_generator $(PYTHON_FOR_REGEN) -m pegen -q python \ $(srcdir)/Tools/peg_generator/pegen/metagrammar.gram \ -o $(srcdir)/Tools/peg_generator/pegen/grammar_parser.py.new From webhook-mailer at python.org Sat May 2 12:12:11 2020 From: webhook-mailer at python.org (Sander) Date: Sat, 02 May 2020 16:12:11 -0000 Subject: [Python-checkins] bpo-40419: timeit CLI docs now mention 1, 2, 5, 10, ... trials instead of powers of 10 (GH-19752) Message-ID: https://github.com/python/cpython/commit/766352320fd736e2c8ed545b4cc57563f61a0b9d commit: 766352320fd736e2c8ed545b4cc57563f61a0b9d branch: master author: Sander committer: GitHub date: 2020-05-02T19:12:05+03:00 summary: bpo-40419: timeit CLI docs now mention 1,2,5,10,... trials instead of powers of 10 (GH-19752) files: M Doc/library/timeit.rst M Lib/timeit.py diff --git a/Doc/library/timeit.rst b/Doc/library/timeit.rst index ef7a4e40be659..46fa62c15fc2e 100644 --- a/Doc/library/timeit.rst +++ b/Doc/library/timeit.rst @@ -251,7 +251,8 @@ quotes and using leading spaces. Multiple :option:`-s` options are treated similarly. 
If :option:`-n` is not given, a suitable number of loops is calculated by trying -successive powers of 10 until the total time is at least 0.2 seconds. +increasing numbers from the sequence 1, 2, 5, 10, 20, 50, ... until the total +time is at least 0.2 seconds. :func:`default_timer` measurements can be affected by other programs running on the same machine, so the best thing to do when accurate timing is necessary is diff --git a/Lib/timeit.py b/Lib/timeit.py index c0362bcc5f3e2..6c3ec01067f2d 100755 --- a/Lib/timeit.py +++ b/Lib/timeit.py @@ -29,7 +29,8 @@ treated similarly. If -n is not given, a suitable number of loops is calculated by trying -successive powers of 10 until the total time is at least 0.2 seconds. +increasing numbers from the sequence 1, 2, 5, 10, 20, 50, ... until the +total time is at least 0.2 seconds. Note: there is a certain baseline overhead associated with executing a pass statement. It differs between versions. The code here doesn't try From webhook-mailer at python.org Sat May 2 12:29:57 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sat, 02 May 2020 16:29:57 -0000 Subject: [Python-checkins] bpo-40419: timeit CLI docs now mention 1, 2, 5, 10, ... trials instead of powers of 10 (GH-19752) Message-ID: https://github.com/python/cpython/commit/399b9a4a620f544c1afa3b8c7fd82d093b5cc76d commit: 399b9a4a620f544c1afa3b8c7fd82d093b5cc76d branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-02T09:29:51-07:00 summary: bpo-40419: timeit CLI docs now mention 1,2,5,10,... 
trials instead of powers of 10 (GH-19752) (cherry picked from commit 766352320fd736e2c8ed545b4cc57563f61a0b9d) Co-authored-by: Sander files: M Doc/library/timeit.rst M Lib/timeit.py diff --git a/Doc/library/timeit.rst b/Doc/library/timeit.rst index ef7a4e40be659..46fa62c15fc2e 100644 --- a/Doc/library/timeit.rst +++ b/Doc/library/timeit.rst @@ -251,7 +251,8 @@ quotes and using leading spaces. Multiple :option:`-s` options are treated similarly. If :option:`-n` is not given, a suitable number of loops is calculated by trying -successive powers of 10 until the total time is at least 0.2 seconds. +increasing numbers from the sequence 1, 2, 5, 10, 20, 50, ... until the total +time is at least 0.2 seconds. :func:`default_timer` measurements can be affected by other programs running on the same machine, so the best thing to do when accurate timing is necessary is diff --git a/Lib/timeit.py b/Lib/timeit.py index c0362bcc5f3e2..6c3ec01067f2d 100755 --- a/Lib/timeit.py +++ b/Lib/timeit.py @@ -29,7 +29,8 @@ treated similarly. If -n is not given, a suitable number of loops is calculated by trying -successive powers of 10 until the total time is at least 0.2 seconds. +increasing numbers from the sequence 1, 2, 5, 10, 20, 50, ... until the +total time is at least 0.2 seconds. Note: there is a certain baseline overhead associated with executing a pass statement. It differs between versions. The code here doesn't try From webhook-mailer at python.org Sat May 2 12:29:57 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sat, 02 May 2020 16:29:57 -0000 Subject: [Python-checkins] bpo-40419: timeit CLI docs now mention 1, 2, 5, 10, ... 
trials instead of powers of 10 (GH-19752) Message-ID: https://github.com/python/cpython/commit/4eec39a98c57bc374888b54c34ca11fdffcffc07 commit: 4eec39a98c57bc374888b54c34ca11fdffcffc07 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-02T09:29:49-07:00 summary: bpo-40419: timeit CLI docs now mention 1,2,5,10,... trials instead of powers of 10 (GH-19752) (cherry picked from commit 766352320fd736e2c8ed545b4cc57563f61a0b9d) Co-authored-by: Sander files: M Doc/library/timeit.rst M Lib/timeit.py diff --git a/Doc/library/timeit.rst b/Doc/library/timeit.rst index ef7a4e40be659..46fa62c15fc2e 100644 --- a/Doc/library/timeit.rst +++ b/Doc/library/timeit.rst @@ -251,7 +251,8 @@ quotes and using leading spaces. Multiple :option:`-s` options are treated similarly. If :option:`-n` is not given, a suitable number of loops is calculated by trying -successive powers of 10 until the total time is at least 0.2 seconds. +increasing numbers from the sequence 1, 2, 5, 10, 20, 50, ... until the total +time is at least 0.2 seconds. :func:`default_timer` measurements can be affected by other programs running on the same machine, so the best thing to do when accurate timing is necessary is diff --git a/Lib/timeit.py b/Lib/timeit.py index c0362bcc5f3e2..6c3ec01067f2d 100755 --- a/Lib/timeit.py +++ b/Lib/timeit.py @@ -29,7 +29,8 @@ treated similarly. If -n is not given, a suitable number of loops is calculated by trying -successive powers of 10 until the total time is at least 0.2 seconds. +increasing numbers from the sequence 1, 2, 5, 10, 20, 50, ... until the +total time is at least 0.2 seconds. Note: there is a certain baseline overhead associated with executing a pass statement. It differs between versions. 
The code here doesn't try From webhook-mailer at python.org Sat May 2 19:45:36 2020 From: webhook-mailer at python.org (Raymond Hettinger) Date: Sat, 02 May 2020 23:45:36 -0000 Subject: [Python-checkins] bpo-40465: Deprecate the optional argument to random.shuffle(). (#19867) Message-ID: https://github.com/python/cpython/commit/190fac99c58232f3e0b34891872b91e50ea2f057 commit: 190fac99c58232f3e0b34891872b91e50ea2f057 branch: master author: Raymond Hettinger committer: GitHub date: 2020-05-02T16:45:32-07:00 summary: bpo-40465: Deprecate the optional argument to random.shuffle(). (#19867) files: A Misc/NEWS.d/next/Library/2020-05-02-12-00-28.bpo-40465.qfCjOD.rst M Doc/library/random.rst M Lib/random.py M Lib/test/test_random.py diff --git a/Doc/library/random.rst b/Doc/library/random.rst index 291eca3a3f16a..43a9902f6c11f 100644 --- a/Doc/library/random.rst +++ b/Doc/library/random.rst @@ -208,6 +208,9 @@ Functions for sequences generated. For example, a sequence of length 2080 is the largest that can fit within the period of the Mersenne Twister random number generator. + .. deprecated-removed:: 3.9 3.11 + The optional parameter *random*. + .. 
function:: sample(population, k) diff --git a/Lib/random.py b/Lib/random.py index 8f840e1abb908..f2c4f39fb6079 100644 --- a/Lib/random.py +++ b/Lib/random.py @@ -321,6 +321,10 @@ def shuffle(self, x, random=None): j = randbelow(i+1) x[i], x[j] = x[j], x[i] else: + _warn('The *random* parameter to shuffle() has been deprecated\n' + 'since Python 3.9 and will be removed in a subsequent ' + 'version.', + DeprecationWarning, 2) _int = int for i in reversed(range(1, len(x))): # pick an element in x[:i+1] with which to exchange x[i] diff --git a/Lib/test/test_random.py b/Lib/test/test_random.py index 6d87d21cf22c6..bb95ca0884a51 100644 --- a/Lib/test/test_random.py +++ b/Lib/test/test_random.py @@ -103,7 +103,8 @@ def test_shuffle_random_argument(self): shuffle = self.gen.shuffle mock_random = unittest.mock.Mock(return_value=0.5) seq = bytearray(b'abcdefghijk') - shuffle(seq, mock_random) + with self.assertWarns(DeprecationWarning): + shuffle(seq, mock_random) mock_random.assert_called_with() def test_choice(self): diff --git a/Misc/NEWS.d/next/Library/2020-05-02-12-00-28.bpo-40465.qfCjOD.rst b/Misc/NEWS.d/next/Library/2020-05-02-12-00-28.bpo-40465.qfCjOD.rst new file mode 100644 index 0000000000000..7ce9a44c712e7 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-02-12-00-28.bpo-40465.qfCjOD.rst @@ -0,0 +1 @@ +Deprecated the optional *random* argument to *random.shuffle()*. 
From webhook-mailer at python.org Sat May 2 19:50:52 2020 From: webhook-mailer at python.org (Mathieu Dupuy) Date: Sat, 02 May 2020 23:50:52 -0000 Subject: [Python-checkins] Fix missing space in docs(GH-19866) Message-ID: https://github.com/python/cpython/commit/8aab84312e6062cda44cc67c2b7c0c0f70119c67 commit: 8aab84312e6062cda44cc67c2b7c0c0f70119c67 branch: master author: Mathieu Dupuy committer: GitHub date: 2020-05-02T16:50:47-07:00 summary: Fix missing space in docs(GH-19866) files: M Doc/library/random.rst diff --git a/Doc/library/random.rst b/Doc/library/random.rst index 43a9902f6c11f..ab4ca4b3f8532 100644 --- a/Doc/library/random.rst +++ b/Doc/library/random.rst @@ -107,7 +107,7 @@ Bookkeeping functions .. function:: getrandbits(k) Returns a Python integer with *k* random bits. This method is supplied with - the MersenneTwister generator and some other generators may also provide it + the Mersenne Twister generator and some other generators may also provide it as an optional part of the API. When available, :meth:`getrandbits` enables :meth:`randrange` to handle arbitrarily large ranges. From webhook-mailer at python.org Sat May 2 20:05:56 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sun, 03 May 2020 00:05:56 -0000 Subject: [Python-checkins] Fix missing space in docs(GH-19866) (GH-19872) Message-ID: https://github.com/python/cpython/commit/108e45524d5b2d8aa0d7feb1e593ea061fb36ba4 commit: 108e45524d5b2d8aa0d7feb1e593ea061fb36ba4 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-02T17:05:49-07:00 summary: Fix missing space in docs(GH-19866) (GH-19872) files: M Doc/library/random.rst diff --git a/Doc/library/random.rst b/Doc/library/random.rst index c01b2294b0436..2149e800e1657 100644 --- a/Doc/library/random.rst +++ b/Doc/library/random.rst @@ -102,7 +102,7 @@ Bookkeeping functions .. function:: getrandbits(k) Returns a Python integer with *k* random bits. 
This method is supplied with - the MersenneTwister generator and some other generators may also provide it + the Mersenne Twister generator and some other generators may also provide it as an optional part of the API. When available, :meth:`getrandbits` enables :meth:`randrange` to handle arbitrarily large ranges. From webhook-mailer at python.org Sat May 2 22:30:34 2020 From: webhook-mailer at python.org (Raymond Hettinger) Date: Sun, 03 May 2020 02:30:34 -0000 Subject: [Python-checkins] Minor code cleanups for statistics (GH-19873) Message-ID: https://github.com/python/cpython/commit/0400a7f2f8abec8d441990e951cc25f69a2a4036 commit: 0400a7f2f8abec8d441990e951cc25f69a2a4036 branch: master author: Raymond Hettinger committer: GitHub date: 2020-05-02T19:30:24-07:00 summary: Minor code cleanups for statistics (GH-19873) * Minor cleanups: Removed unused code. Move C import near its Python version. * Clean-up whitespace files: M Lib/statistics.py diff --git a/Lib/statistics.py b/Lib/statistics.py index 9beafb341b3ad..c76a6ca519e40 100644 --- a/Lib/statistics.py +++ b/Lib/statistics.py @@ -894,6 +894,13 @@ def _normal_dist_inv_cdf(p, mu, sigma): return mu + (x * sigma) +# If available, use C implementation +try: + from _statistics import _normal_dist_inv_cdf +except ImportError: + pass + + class NormalDist: "Normal distribution of a random variable" # https://en.wikipedia.org/wiki/Normal_distribution @@ -1111,79 +1118,3 @@ def __hash__(self): def __repr__(self): return f'{type(self).__name__}(mu={self._mu!r}, sigma={self._sigma!r})' - -# If available, use C implementation -try: - from _statistics import _normal_dist_inv_cdf -except ImportError: - pass - - -if __name__ == '__main__': - - # Show math operations computed analytically in comparsion - # to a monte carlo simulation of the same operations - - from math import isclose - from operator import add, sub, mul, truediv - from itertools import repeat - import doctest - - g1 = NormalDist(10, 20) - g2 = NormalDist(-5, 
25) - - # Test scaling by a constant - assert (g1 * 5 / 5).mean == g1.mean - assert (g1 * 5 / 5).stdev == g1.stdev - - n = 100_000 - G1 = g1.samples(n) - G2 = g2.samples(n) - - for func in (add, sub): - print(f'\nTest {func.__name__} with another NormalDist:') - print(func(g1, g2)) - print(NormalDist.from_samples(map(func, G1, G2))) - - const = 11 - for func in (add, sub, mul, truediv): - print(f'\nTest {func.__name__} with a constant:') - print(func(g1, const)) - print(NormalDist.from_samples(map(func, G1, repeat(const)))) - - const = 19 - for func in (add, sub, mul): - print(f'\nTest constant with {func.__name__}:') - print(func(const, g1)) - print(NormalDist.from_samples(map(func, repeat(const), G1))) - - def assert_close(G1, G2): - assert isclose(G1.mean, G1.mean, rel_tol=0.01), (G1, G2) - assert isclose(G1.stdev, G2.stdev, rel_tol=0.01), (G1, G2) - - X = NormalDist(-105, 73) - Y = NormalDist(31, 47) - s = 32.75 - n = 100_000 - - S = NormalDist.from_samples([x + s for x in X.samples(n)]) - assert_close(X + s, S) - - S = NormalDist.from_samples([x - s for x in X.samples(n)]) - assert_close(X - s, S) - - S = NormalDist.from_samples([x * s for x in X.samples(n)]) - assert_close(X * s, S) - - S = NormalDist.from_samples([x / s for x in X.samples(n)]) - assert_close(X / s, S) - - S = NormalDist.from_samples([x + y for x, y in zip(X.samples(n), - Y.samples(n))]) - assert_close(X + Y, S) - - S = NormalDist.from_samples([x - y for x, y in zip(X.samples(n), - Y.samples(n))]) - assert_close(X - Y, S) - - print(doctest.testmod()) From webhook-mailer at python.org Sun May 3 03:08:11 2020 From: webhook-mailer at python.org (Chris Jerdonek) Date: Sun, 03 May 2020 07:08:11 -0000 Subject: [Python-checkins] bpo-29587: allow chaining NULL exceptions in _gen_throw() (GH-19877) Message-ID: https://github.com/python/cpython/commit/21893fbb74e8fde2931fbed9b511e2a41362b1ab commit: 21893fbb74e8fde2931fbed9b511e2a41362b1ab branch: master author: Chris Jerdonek committer: GitHub date: 
2020-05-03T00:07:57-07:00 summary: bpo-29587: allow chaining NULL exceptions in _gen_throw() (GH-19877) This is a follow-up to GH-19823 that removes the check that the exception value isn't NULL, prior to calling _PyErr_ChainExceptions(). This enables implicit exception chaining for gen.throw() in more circumstances. The commit also adds a test that a particular code snippet involving gen.throw() doesn't crash. The test shows why the new `gi_exc_state.exc_type != Py_None` check that was added is necessary. Without the new check, the code snippet (as well as a number of other tests) crashes on certain platforms (e.g. Fedora but not Mac). files: M Lib/test/test_generators.py M Objects/genobject.c diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py index 4d96f44b15062..5824ecd7c37e8 100644 --- a/Lib/test/test_generators.py +++ b/Lib/test/test_generators.py @@ -332,6 +332,26 @@ def f(): context = cm.exception.__context__ self.assertEqual((type(context), context.args), (KeyError, ('a',))) + def test_throw_after_none_exc_type(self): + def g(): + try: + raise KeyError + except KeyError: + pass + + try: + yield + except Exception: + # Without the `gi_exc_state.exc_type != Py_None` in + # _gen_throw(), this line was causing a crash ("Segmentation + # fault (core dumped)") on e.g. Fedora 32. + raise RuntimeError + + gen = g() + gen.send(None) + with self.assertRaises(RuntimeError) as cm: + gen.throw(ValueError) + class YieldFromTests(unittest.TestCase): def test_generator_gi_yieldfrom(self): diff --git a/Objects/genobject.c b/Objects/genobject.c index 41a63ae2e666a..b27fa929a2625 100644 --- a/Objects/genobject.c +++ b/Objects/genobject.c @@ -512,11 +512,12 @@ _gen_throw(PyGenObject *gen, int close_on_genexit, } PyErr_Restore(typ, val, tb); - /* XXX Should we also handle the case where exc_type is true and - exc_value is false? 
*/ - if (gen->gi_exc_state.exc_type && gen->gi_exc_state.exc_value) { + /* XXX It seems like we shouldn't have to check not equal to Py_None + here because exc_type should only ever be a class. But not including + this check was causing crashes on certain tests e.g. on Fedora. */ + if (gen->gi_exc_state.exc_type && gen->gi_exc_state.exc_type != Py_None) { Py_INCREF(gen->gi_exc_state.exc_type); - Py_INCREF(gen->gi_exc_state.exc_value); + Py_XINCREF(gen->gi_exc_state.exc_value); Py_XINCREF(gen->gi_exc_state.exc_traceback); _PyErr_ChainExceptions(gen->gi_exc_state.exc_type, gen->gi_exc_state.exc_value, gen->gi_exc_state.exc_traceback); From webhook-mailer at python.org Sun May 3 07:51:28 2020 From: webhook-mailer at python.org (Raymond Hettinger) Date: Sun, 03 May 2020 11:51:28 -0000 Subject: [Python-checkins] Simplify set entry insertion logic. (GH-19881) Message-ID: https://github.com/python/cpython/commit/3dd2157febae5087ca3333d24f69b6de9cbd13cd commit: 3dd2157febae5087ca3333d24f69b6de9cbd13cd branch: master author: Raymond Hettinger committer: GitHub date: 2020-05-03T04:51:05-07:00 summary: Simplify set entry insertion logic. 
(GH-19881) files: M Objects/setobject.c diff --git a/Objects/setobject.c b/Objects/setobject.c index bbe013bcfac74..df4a0e1e9420e 100644 --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -137,7 +137,6 @@ static int set_add_entry(PySetObject *so, PyObject *key, Py_hash_t hash) { setentry *table; - setentry *freeslot; setentry *entry; size_t perturb; size_t mask; @@ -158,7 +157,6 @@ set_add_entry(PySetObject *so, PyObject *key, Py_hash_t hash) if (entry->key == NULL) goto found_unused; - freeslot = NULL; perturb = hash; while (1) { @@ -187,14 +185,12 @@ set_add_entry(PySetObject *so, PyObject *key, Py_hash_t hash) goto restart; mask = so->mask; /* help avoid a register spill */ } - else if (entry->hash == -1) - freeslot = entry; if (i + LINEAR_PROBES <= mask) { for (j = 0 ; j < LINEAR_PROBES ; j++) { entry++; if (entry->hash == 0 && entry->key == NULL) - goto found_unused_or_dummy; + goto found_unused; if (entry->hash == hash) { PyObject *startkey = entry->key; assert(startkey != dummy); @@ -216,8 +212,6 @@ set_add_entry(PySetObject *so, PyObject *key, Py_hash_t hash) goto restart; mask = so->mask; } - else if (entry->hash == -1) - freeslot = entry; } } @@ -226,17 +220,9 @@ set_add_entry(PySetObject *so, PyObject *key, Py_hash_t hash) entry = &so->table[i]; if (entry->key == NULL) - goto found_unused_or_dummy; + goto found_unused; } - found_unused_or_dummy: - if (freeslot == NULL) - goto found_unused; - so->used++; - freeslot->key = key; - freeslot->hash = hash; - return 0; - found_unused: so->fill++; so->used++; From webhook-mailer at python.org Sun May 3 13:12:00 2020 From: webhook-mailer at python.org (Batuhan Taskaya) Date: Sun, 03 May 2020 17:12:00 -0000 Subject: [Python-checkins] bpo-38870: Don't start generated output with newlines in ast.unparse (GH-19636) Message-ID: https://github.com/python/cpython/commit/493bf1cc316b0b5bd90779ecd1132878c881669e commit: 493bf1cc316b0b5bd90779ecd1132878c881669e branch: master author: Batuhan Taskaya committer: GitHub 
date: 2020-05-03T18:11:51+01:00 summary: bpo-38870: Don't start generated output with newlines in ast.unparse (GH-19636) files: M Lib/ast.py M Lib/test/test_unparse.py diff --git a/Lib/ast.py b/Lib/ast.py index 401af5647a240..5c68c4a66e1dd 100644 --- a/Lib/ast.py +++ b/Lib/ast.py @@ -669,10 +669,16 @@ def items_view(self, traverser, items): else: self.interleave(lambda: self.write(", "), traverser, items) + def maybe_newline(self): + """Adds a newline if it isn't the start of generated source""" + if self._source: + self.write("\n") + def fill(self, text=""): """Indent a piece of text and append it, according to the current indentation level""" - self.write("\n" + " " * self._indent + text) + self.maybe_newline() + self.write(" " * self._indent + text) def write(self, text): """Append a piece of text""" @@ -916,7 +922,7 @@ def visit_ExceptHandler(self, node): self.traverse(node.body) def visit_ClassDef(self, node): - self.write("\n") + self.maybe_newline() for deco in node.decorator_list: self.fill("@") self.traverse(deco) @@ -946,7 +952,7 @@ def visit_AsyncFunctionDef(self, node): self._function_helper(node, "async def") def _function_helper(self, node, fill_suffix): - self.write("\n") + self.maybe_newline() for deco in node.decorator_list: self.fill("@") self.traverse(deco) @@ -1043,7 +1049,7 @@ def _fstring_FormattedValue(self, node, write): write("{") unparser = type(self)() unparser.set_precedence(_Precedence.TEST.next(), node.value) - expr = unparser.visit(node.value).rstrip("\n") + expr = unparser.visit(node.value) if expr.startswith("{"): write(" ") # Separate pair of opening brackets as "{ {" write(expr) diff --git a/Lib/test/test_unparse.py b/Lib/test/test_unparse.py index d4089a3fc1cdf..2be44b246aa69 100644 --- a/Lib/test/test_unparse.py +++ b/Lib/test/test_unparse.py @@ -128,19 +128,17 @@ def check_ast_roundtrip(self, code1, **kwargs): def check_invalid(self, node, raises=ValueError): self.assertRaises(raises, ast.unparse, node) - def get_source(self, 
code1, code2=None, strip=True): + def get_source(self, code1, code2=None): code2 = code2 or code1 code1 = ast.unparse(ast.parse(code1)) - if strip: - code1 = code1.strip() return code1, code2 - def check_src_roundtrip(self, code1, code2=None, strip=True): - code1, code2 = self.get_source(code1, code2, strip) + def check_src_roundtrip(self, code1, code2=None): + code1, code2 = self.get_source(code1, code2) self.assertEqual(code2, code1) - def check_src_dont_roundtrip(self, code1, code2=None, strip=True): - code1, code2 = self.get_source(code1, code2, strip) + def check_src_dont_roundtrip(self, code1, code2=None): + code1, code2 = self.get_source(code1, code2) self.assertNotEqual(code2, code1) class UnparseTestCase(ASTTestCase): From webhook-mailer at python.org Sun May 3 14:25:54 2020 From: webhook-mailer at python.org (Raymond Hettinger) Date: Sun, 03 May 2020 18:25:54 -0000 Subject: [Python-checkins] Remove out-of-date comment (GH-19886) Message-ID: https://github.com/python/cpython/commit/d699d5e6178adca785a8701c32daf5e18fad0bf1 commit: d699d5e6178adca785a8701c32daf5e18fad0bf1 branch: master author: Raymond Hettinger committer: GitHub date: 2020-05-03T11:25:46-07:00 summary: Remove out-of-date comment (GH-19886) files: M Objects/setobject.c diff --git a/Objects/setobject.c b/Objects/setobject.c index df4a0e1e9420e..0e4e45f60a9cc 100644 --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -243,8 +243,7 @@ set_add_entry(PySetObject *so, PyObject *key, Py_hash_t hash) /* Internal routine used by set_table_resize() to insert an item which is -known to be absent from the set. This routine also assumes that -the set contains no deleted entries. Besides the performance benefit, +known to be absent from the set. Besides the performance benefit, there is also safety benefit since using set_add_entry() risks making a callback in the middle of a set_table_resize(), see issue 1456209. 
The caller is responsible for updating the key's reference count and From webhook-mailer at python.org Sun May 3 19:45:17 2020 From: webhook-mailer at python.org (Raymond Hettinger) Date: Sun, 03 May 2020 23:45:17 -0000 Subject: [Python-checkins] Remove outdated and confusing advice about setting maxsize (GH-19889) Message-ID: https://github.com/python/cpython/commit/ad9eaeab74b680830dbefc18e8fe3dec4677a21b commit: ad9eaeab74b680830dbefc18e8fe3dec4677a21b branch: master author: Raymond Hettinger committer: GitHub date: 2020-05-03T16:45:13-07:00 summary: Remove outdated and confusing advice about setting maxsize (GH-19889) files: M Doc/library/functools.rst diff --git a/Doc/library/functools.rst b/Doc/library/functools.rst index e708a0d99cd00..856c1c790ae36 100644 --- a/Doc/library/functools.rst +++ b/Doc/library/functools.rst @@ -107,8 +107,7 @@ The :mod:`functools` module defines the following functions: return sum(sentence.count(vowel) for vowel in 'aeiou') If *maxsize* is set to ``None``, the LRU feature is disabled and the cache can - grow without bound. The LRU feature performs best when *maxsize* is a - power-of-two. + grow without bound. If *typed* is set to true, function arguments of different types will be cached separately. 
For example, ``f(3)`` and ``f(3.0)`` will be treated From webhook-mailer at python.org Sun May 3 20:16:42 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 04 May 2020 00:16:42 -0000 Subject: [Python-checkins] Remove outdated and confusing advice about setting maxsize (GH-19889) (GH-19890) Message-ID: https://github.com/python/cpython/commit/9609460ce0b71215eff2d057b6780950e00be013 commit: 9609460ce0b71215eff2d057b6780950e00be013 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-03T17:16:36-07:00 summary: Remove outdated and confusing advice about setting maxsize (GH-19889) (GH-19890) files: M Doc/library/functools.rst diff --git a/Doc/library/functools.rst b/Doc/library/functools.rst index 3a0b554e923c7..0fb8d900c7362 100644 --- a/Doc/library/functools.rst +++ b/Doc/library/functools.rst @@ -101,8 +101,7 @@ The :mod:`functools` module defines the following functions: return sum(sentence.count(vowel) for vowel in 'aeiou') If *maxsize* is set to ``None``, the LRU feature is disabled and the cache can - grow without bound. The LRU feature performs best when *maxsize* is a - power-of-two. + grow without bound. If *typed* is set to true, function arguments of different types will be cached separately. 
For example, ``f(3)`` and ``f(3.0)`` will be treated From webhook-mailer at python.org Sun May 3 20:20:13 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Mon, 04 May 2020 00:20:13 -0000 Subject: [Python-checkins] bpo-40334: Set error_indicator in _PyPegen_raise_error (GH-19887) Message-ID: https://github.com/python/cpython/commit/7f06af684a1882fdb19d20650825948b1d7996e5 commit: 7f06af684a1882fdb19d20650825948b1d7996e5 branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-05-04T01:20:09+01:00 summary: bpo-40334: Set error_indicator in _PyPegen_raise_error (GH-19887) Due to PyErr_Occurred not being called at the beginning of each rule, we need to set the error indicator, so that rules do not get expanded after an exception has been thrown files: M Parser/pegen/pegen.c diff --git a/Parser/pegen/pegen.c b/Parser/pegen/pegen.c index 6ff09b3b31f78..9858f71c83c79 100644 --- a/Parser/pegen/pegen.c +++ b/Parser/pegen/pegen.c @@ -389,6 +389,7 @@ _PyPegen_raise_error(Parser *p, PyObject *errtype, int with_col_number, const ch Token *t = p->tokens[p->fill - 1]; Py_ssize_t col_number = !with_col_number; va_list va; + p->error_indicator = 1; va_start(va, errmsg); errstr = PyUnicode_FromFormatV(errmsg, va); From webhook-mailer at python.org Sun May 3 22:03:13 2020 From: webhook-mailer at python.org (Anthony Shaw) Date: Mon, 04 May 2020 02:03:13 -0000 Subject: [Python-checkins] Clean up unused imports for the peg generator module (GH-19891) Message-ID: https://github.com/python/cpython/commit/c95e691c904bb5ebd91825efa81b93cb9e354a85 commit: c95e691c904bb5ebd91825efa81b93cb9e354a85 branch: master author: Anthony Shaw committer: GitHub date: 2020-05-04T03:03:05+01:00 summary: Clean up unused imports for the peg generator module (GH-19891) files: M Tools/peg_generator/pegen/build.py M Tools/peg_generator/pegen/c_generator.py M Tools/peg_generator/pegen/first_sets.py M Tools/peg_generator/pegen/grammar.py M 
Tools/peg_generator/pegen/grammar_visualizer.py M Tools/peg_generator/scripts/ast_timings.py M Tools/peg_generator/scripts/benchmark.py M Tools/peg_generator/scripts/find_max_nesting.py M Tools/peg_generator/scripts/test_parse_directory.py diff --git a/Tools/peg_generator/pegen/build.py b/Tools/peg_generator/pegen/build.py index 8f9348ddf24ac..931ffc787523b 100644 --- a/Tools/peg_generator/pegen/build.py +++ b/Tools/peg_generator/pegen/build.py @@ -1,12 +1,11 @@ import pathlib import shutil import tokenize -import sys import sysconfig import tempfile import itertools -from typing import Optional, Tuple, List, IO, Iterator, Set, Dict +from typing import Optional, Tuple, List, IO, Set, Dict from pegen.c_generator import CParserGenerator from pegen.grammar import Grammar diff --git a/Tools/peg_generator/pegen/c_generator.py b/Tools/peg_generator/pegen/c_generator.py index c9c67067d4677..40004e7875278 100644 --- a/Tools/peg_generator/pegen/c_generator.py +++ b/Tools/peg_generator/pegen/c_generator.py @@ -1,7 +1,7 @@ import ast from dataclasses import dataclass, field import re -from typing import IO, Any, Dict, List, Optional, Set, Text, Tuple +from typing import Any, Dict, IO, Optional, List, Text, Tuple, Set from enum import Enum from pegen import grammar diff --git a/Tools/peg_generator/pegen/first_sets.py b/Tools/peg_generator/pegen/first_sets.py index da30eba99ce5a..71be5a2e7cbf4 100755 --- a/Tools/peg_generator/pegen/first_sets.py +++ b/Tools/peg_generator/pegen/first_sets.py @@ -1,10 +1,9 @@ #!/usr/bin/env python3.8 import argparse -import collections import pprint import sys -from typing import Optional, Set, Dict +from typing import Set, Dict from pegen.build import build_parser from pegen.grammar import ( diff --git a/Tools/peg_generator/pegen/grammar.py b/Tools/peg_generator/pegen/grammar.py index 67039d5a032ab..78edf412ea6e4 100644 --- a/Tools/peg_generator/pegen/grammar.py +++ b/Tools/peg_generator/pegen/grammar.py @@ -4,7 +4,6 @@ from typing import ( 
AbstractSet, Any, - Callable, Dict, Iterable, Iterator, @@ -13,11 +12,9 @@ Set, Tuple, TYPE_CHECKING, - TypeVar, Union, ) -from pegen.parser import memoize, Parser if TYPE_CHECKING: from pegen.parser_generator import ParserGenerator diff --git a/Tools/peg_generator/pegen/grammar_visualizer.py b/Tools/peg_generator/pegen/grammar_visualizer.py index b1d51d2cdb250..7362ec5fa0f4d 100644 --- a/Tools/peg_generator/pegen/grammar_visualizer.py +++ b/Tools/peg_generator/pegen/grammar_visualizer.py @@ -1,7 +1,7 @@ import argparse import sys -from typing import Any, Iterator, Iterable, Callable +from typing import Any, Iterator, Callable from pegen.build import build_parser from pegen.grammar import Grammar, Rule diff --git a/Tools/peg_generator/scripts/ast_timings.py b/Tools/peg_generator/scripts/ast_timings.py index 7ebd46fdac685..ca252208f283e 100644 --- a/Tools/peg_generator/scripts/ast_timings.py +++ b/Tools/peg_generator/scripts/ast_timings.py @@ -1,8 +1,6 @@ import ast import sys import time -import token -import tokenize from pegen.testutil import print_memstats diff --git a/Tools/peg_generator/scripts/benchmark.py b/Tools/peg_generator/scripts/benchmark.py index 0e9d5bd104813..d39ac3dca79e7 100644 --- a/Tools/peg_generator/scripts/benchmark.py +++ b/Tools/peg_generator/scripts/benchmark.py @@ -4,7 +4,6 @@ import ast import sys import os -import resource from time import time import memory_profiler diff --git a/Tools/peg_generator/scripts/find_max_nesting.py b/Tools/peg_generator/scripts/find_max_nesting.py index a2c41a821342a..f2fdd00bfb7cd 100755 --- a/Tools/peg_generator/scripts/find_max_nesting.py +++ b/Tools/peg_generator/scripts/find_max_nesting.py @@ -13,11 +13,7 @@ Usage: python -m scripts.find_max_nesting """ -import os import sys -from tempfile import TemporaryDirectory -from pathlib import Path -from typing import Any from _peg_parser import parse_string diff --git a/Tools/peg_generator/scripts/test_parse_directory.py 
b/Tools/peg_generator/scripts/test_parse_directory.py index a6078ef564061..aef9c74b52881 100755 --- a/Tools/peg_generator/scripts/test_parse_directory.py +++ b/Tools/peg_generator/scripts/test_parse_directory.py @@ -4,7 +4,6 @@ import ast import os import sys -import tempfile import time import traceback from glob import glob From webhook-mailer at python.org Mon May 4 01:08:22 2020 From: webhook-mailer at python.org (Shantanu) Date: Mon, 04 May 2020 05:08:22 -0000 Subject: [Python-checkins] bpo-40493: fix function type comment parsing (GH-19894) Message-ID: https://github.com/python/cpython/commit/603d3546264149f323edb7952b60075fb6bc4dc2 commit: 603d3546264149f323edb7952b60075fb6bc4dc2 branch: master author: Shantanu committer: GitHub date: 2020-05-03T22:08:14-07:00 summary: bpo-40493: fix function type comment parsing (GH-19894) The grammar for func_type_input rejected things like `(*t1) ->t2`. This fixes that. Automerge-Triggered-By: @gvanrossum files: M Grammar/python.gram M Lib/test/test_type_comments.py M Parser/pegen/parse.c diff --git a/Grammar/python.gram b/Grammar/python.gram index cbd4bc010dc1e..8e494905cea32 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -40,6 +40,10 @@ type_expressions[asdl_seq*]: _PyPegen_seq_append_to_end(p, CHECK(_PyPegen_seq_append_to_end(p, a, b)), c) } | a=','.expression+ ',' '*' b=expression { _PyPegen_seq_append_to_end(p, a, b) } | a=','.expression+ ',' '**' b=expression { _PyPegen_seq_append_to_end(p, a, b) } + | '*' a=expression ',' '**' b=expression { + _PyPegen_seq_append_to_end(p, CHECK(_PyPegen_singleton_seq(p, a)), b) } + | '*' a=expression { _PyPegen_singleton_seq(p, a) } + | '**' a=expression { _PyPegen_singleton_seq(p, a) } | ','.expression+ statements[asdl_seq*]: a=statement+ { _PyPegen_seq_flatten(p, a) } diff --git a/Lib/test/test_type_comments.py b/Lib/test/test_type_comments.py index 6027b3b56f76f..71d1430dbc939 100644 --- a/Lib/test/test_type_comments.py +++ b/Lib/test/test_type_comments.py @@ 
-399,6 +399,14 @@ def parse_func_type_input(source): self.assertEqual(tree.argtypes[2].id, "Any") self.assertEqual(tree.returns.id, "float") + tree = parse_func_type_input("(*int) -> None") + self.assertEqual(tree.argtypes[0].id, "int") + tree = parse_func_type_input("(**int) -> None") + self.assertEqual(tree.argtypes[0].id, "int") + tree = parse_func_type_input("(*int, **str) -> None") + self.assertEqual(tree.argtypes[0].id, "int") + self.assertEqual(tree.argtypes[1].id, "str") + with self.assertRaises(SyntaxError): tree = parse_func_type_input("(int, *str, *Any) -> float") diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index b4745ba4d4f26..492b5e6f9e2b7 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -825,6 +825,9 @@ fstring_rule(Parser *p) // | ','.expression+ ',' '*' expression ',' '**' expression // | ','.expression+ ',' '*' expression // | ','.expression+ ',' '**' expression +// | '*' expression ',' '**' expression +// | '*' expression +// | '**' expression // | ','.expression+ static asdl_seq* type_expressions_rule(Parser *p) @@ -915,6 +918,69 @@ type_expressions_rule(Parser *p) } p->mark = mark; } + { // '*' expression ',' '**' expression + expr_ty a; + expr_ty b; + Token * literal; + Token * literal_1; + Token * literal_2; + if ( + (literal = _PyPegen_expect_token(p, 16)) + && + (a = expression_rule(p)) + && + (literal_1 = _PyPegen_expect_token(p, 12)) + && + (literal_2 = _PyPegen_expect_token(p, 35)) + && + (b = expression_rule(p)) + ) + { + res = _PyPegen_seq_append_to_end ( p , CHECK ( _PyPegen_singleton_seq ( p , a ) ) , b ); + if (res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + return NULL; + } + goto done; + } + p->mark = mark; + } + { // '*' expression + expr_ty a; + Token * literal; + if ( + (literal = _PyPegen_expect_token(p, 16)) + && + (a = expression_rule(p)) + ) + { + res = _PyPegen_singleton_seq ( p , a ); + if (res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + return NULL; + } + goto 
done; + } + p->mark = mark; + } + { // '**' expression + expr_ty a; + Token * literal; + if ( + (literal = _PyPegen_expect_token(p, 35)) + && + (a = expression_rule(p)) + ) + { + res = _PyPegen_singleton_seq ( p , a ); + if (res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + return NULL; + } + goto done; + } + p->mark = mark; + } { // ','.expression+ asdl_seq * _gather_9_var; if ( From webhook-mailer at python.org Mon May 4 03:56:13 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Mon, 04 May 2020 07:56:13 -0000 Subject: [Python-checkins] bpo-40408: Fix support of nested type variables in GenericAlias. (GH-19836) Message-ID: https://github.com/python/cpython/commit/41a64587a0fd68bcd21ba42999cd3940801dff7c commit: 41a64587a0fd68bcd21ba42999cd3940801dff7c branch: master author: Serhiy Storchaka committer: GitHub date: 2020-05-04T10:56:05+03:00 summary: bpo-40408: Fix support of nested type variables in GenericAlias. (GH-19836) files: A Misc/NEWS.d/next/Core and Builtins/2020-05-01-15-36-14.bpo-40408.XzQI59.rst M Lib/test/test_genericalias.py M Objects/genericaliasobject.c diff --git a/Lib/test/test_genericalias.py b/Lib/test/test_genericalias.py index 37cbf92ed1161..024b2f6ed6636 100644 --- a/Lib/test/test_genericalias.py +++ b/Lib/test/test_genericalias.py @@ -41,6 +41,8 @@ from typing import TypeVar T = TypeVar('T') +K = TypeVar('K') +V = TypeVar('V') class BaseTest(unittest.TestCase): """Test basics.""" @@ -170,10 +172,7 @@ def test_exposed_type(self): self.assertEqual(a.__parameters__, ()) def test_parameters(self): - from typing import TypeVar - T = TypeVar('T') - K = TypeVar('K') - V = TypeVar('V') + from typing import List, Dict, Callable D0 = dict[str, int] self.assertEqual(D0.__args__, (str, int)) self.assertEqual(D0.__parameters__, ()) @@ -195,14 +194,43 @@ def test_parameters(self): L1 = list[T] self.assertEqual(L1.__args__, (T,)) self.assertEqual(L1.__parameters__, (T,)) + L2 = list[list[T]] + self.assertEqual(L2.__args__, 
(list[T],)) + self.assertEqual(L2.__parameters__, (T,)) + L3 = list[List[T]] + self.assertEqual(L3.__args__, (List[T],)) + self.assertEqual(L3.__parameters__, (T,)) + L4a = list[Dict[K, V]] + self.assertEqual(L4a.__args__, (Dict[K, V],)) + self.assertEqual(L4a.__parameters__, (K, V)) + L4b = list[Dict[T, int]] + self.assertEqual(L4b.__args__, (Dict[T, int],)) + self.assertEqual(L4b.__parameters__, (T,)) + L5 = list[Callable[[K, V], K]] + self.assertEqual(L5.__args__, (Callable[[K, V], K],)) + self.assertEqual(L5.__parameters__, (K, V)) def test_parameter_chaining(self): - from typing import TypeVar - T = TypeVar('T') + from typing import List, Dict, Union, Callable self.assertEqual(list[T][int], list[int]) self.assertEqual(dict[str, T][int], dict[str, int]) self.assertEqual(dict[T, int][str], dict[str, int]) + self.assertEqual(dict[K, V][str, int], dict[str, int]) self.assertEqual(dict[T, T][int], dict[int, int]) + + self.assertEqual(list[list[T]][int], list[list[int]]) + self.assertEqual(list[dict[T, int]][str], list[dict[str, int]]) + self.assertEqual(list[dict[str, T]][int], list[dict[str, int]]) + self.assertEqual(list[dict[K, V]][str, int], list[dict[str, int]]) + self.assertEqual(dict[T, list[int]][str], dict[str, list[int]]) + + self.assertEqual(list[List[T]][int], list[List[int]]) + self.assertEqual(list[Dict[K, V]][str, int], list[Dict[str, int]]) + self.assertEqual(list[Union[K, V]][str, int], list[Union[str, int]]) + self.assertEqual(list[Callable[[K, V], K]][str, int], + list[Callable[[str, int], str]]) + self.assertEqual(dict[T, List[int]][str], dict[str, List[int]]) + with self.assertRaises(TypeError): list[int][int] dict[T, int][str, int] @@ -255,7 +283,6 @@ def test_union(self): self.assertEqual(a.__parameters__, ()) def test_union_generic(self): - T = typing.TypeVar('T') a = typing.Union[list[T], tuple[T, ...]] self.assertEqual(a.__args__, (list[T], tuple[T, ...])) self.assertEqual(a.__parameters__, (T,)) diff --git a/Misc/NEWS.d/next/Core and 
Builtins/2020-05-01-15-36-14.bpo-40408.XzQI59.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-01-15-36-14.bpo-40408.XzQI59.rst new file mode 100644 index 0000000000000..e6822f9c24044 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-01-15-36-14.bpo-40408.XzQI59.rst @@ -0,0 +1,2 @@ +Fixed support of nested type variables in GenericAlias (e.g. +``list[list[T]]``). diff --git a/Objects/genericaliasobject.c b/Objects/genericaliasobject.c index a56bdda38177f..c06d79c556190 100644 --- a/Objects/genericaliasobject.c +++ b/Objects/genericaliasobject.c @@ -182,28 +182,60 @@ tuple_index(PyObject *self, Py_ssize_t len, PyObject *item) return -1; } -// tuple(t for t in args if isinstance(t, TypeVar)) +static int +tuple_add(PyObject *self, Py_ssize_t len, PyObject *item) +{ + if (tuple_index(self, len, item) < 0) { + Py_INCREF(item); + PyTuple_SET_ITEM(self, len, item); + return 1; + } + return 0; +} + static PyObject * make_parameters(PyObject *args) { - Py_ssize_t len = PyTuple_GET_SIZE(args); + Py_ssize_t nargs = PyTuple_GET_SIZE(args); + Py_ssize_t len = nargs; PyObject *parameters = PyTuple_New(len); if (parameters == NULL) return NULL; Py_ssize_t iparam = 0; - for (Py_ssize_t iarg = 0; iarg < len; iarg++) { + for (Py_ssize_t iarg = 0; iarg < nargs; iarg++) { PyObject *t = PyTuple_GET_ITEM(args, iarg); int typevar = is_typevar(t); if (typevar < 0) { - Py_XDECREF(parameters); + Py_DECREF(parameters); return NULL; } if (typevar) { - if (tuple_index(parameters, iparam, t) < 0) { - Py_INCREF(t); - PyTuple_SET_ITEM(parameters, iparam, t); - iparam++; + iparam += tuple_add(parameters, iparam, t); + } + else { + _Py_IDENTIFIER(__parameters__); + PyObject *subparams; + if (_PyObject_LookupAttrId(t, &PyId___parameters__, &subparams) < 0) { + Py_DECREF(parameters); + return NULL; + } + if (subparams && PyTuple_Check(subparams)) { + Py_ssize_t len2 = PyTuple_GET_SIZE(subparams); + Py_ssize_t needed = len2 - 1 - (iarg - iparam); + if (needed > 0) { + len += needed; + if 
(_PyTuple_Resize(¶meters, len) < 0) { + Py_DECREF(subparams); + Py_DECREF(parameters); + return NULL; + } + } + for (Py_ssize_t j = 0; j < len2; j++) { + PyObject *t2 = PyTuple_GET_ITEM(subparams, j); + iparam += tuple_add(parameters, iparam, t2); + } } + Py_XDECREF(subparams); } } if (iparam < len) { @@ -215,6 +247,48 @@ make_parameters(PyObject *args) return parameters; } +/* If obj is a generic alias, substitute type variables params + with substitutions argitems. For example, if obj is list[T], + params is (T, S), and argitems is (str, int), return list[str]. + If obj doesn't have a __parameters__ attribute or that's not + a non-empty tuple, return a new reference to obj. */ +static PyObject * +subs_tvars(PyObject *obj, PyObject *params, PyObject **argitems) +{ + _Py_IDENTIFIER(__parameters__); + PyObject *subparams; + if (_PyObject_LookupAttrId(obj, &PyId___parameters__, &subparams) < 0) { + return NULL; + } + if (subparams && PyTuple_Check(subparams) && PyTuple_GET_SIZE(subparams)) { + Py_ssize_t nparams = PyTuple_GET_SIZE(params); + Py_ssize_t nsubargs = PyTuple_GET_SIZE(subparams); + PyObject *subargs = PyTuple_New(nsubargs); + if (subargs == NULL) { + Py_DECREF(subparams); + return NULL; + } + for (Py_ssize_t i = 0; i < nsubargs; ++i) { + PyObject *arg = PyTuple_GET_ITEM(subparams, i); + Py_ssize_t iparam = tuple_index(params, nparams, arg); + if (iparam >= 0) { + arg = argitems[iparam]; + } + Py_INCREF(arg); + PyTuple_SET_ITEM(subargs, i, arg); + } + + obj = PyObject_GetItem(obj, subargs); + + Py_DECREF(subargs); + } + else { + Py_INCREF(obj); + } + Py_XDECREF(subparams); + return obj; +} + static PyObject * ga_getitem(PyObject *self, PyObject *item) { @@ -233,17 +307,25 @@ ga_getitem(PyObject *self, PyObject *item) self); } int is_tuple = PyTuple_Check(item); - Py_ssize_t nitem = is_tuple ? PyTuple_GET_SIZE(item) : 1; - if (nitem != nparams) { + Py_ssize_t nitems = is_tuple ? PyTuple_GET_SIZE(item) : 1; + PyObject **argitems = is_tuple ? 
&PyTuple_GET_ITEM(item, 0) : &item; + if (nitems != nparams) { return PyErr_Format(PyExc_TypeError, "Too %s arguments for %R", - nitem > nparams ? "many" : "few", + nitems > nparams ? "many" : "few", self); } + /* Replace all type variables (specified by alias->parameters) + with corresponding values specified by argitems. + t = list[T]; t[int] -> newargs = [int] + t = dict[str, T]; t[int] -> newargs = [str, int] + t = dict[T, list[S]]; t[str, int] -> newargs = [str, list[int]] + */ Py_ssize_t nargs = PyTuple_GET_SIZE(alias->args); PyObject *newargs = PyTuple_New(nargs); - if (newargs == NULL) + if (newargs == NULL) { return NULL; + } for (Py_ssize_t iarg = 0; iarg < nargs; iarg++) { PyObject *arg = PyTuple_GET_ITEM(alias->args, iarg); int typevar = is_typevar(arg); @@ -254,18 +336,21 @@ ga_getitem(PyObject *self, PyObject *item) if (typevar) { Py_ssize_t iparam = tuple_index(alias->parameters, nparams, arg); assert(iparam >= 0); - if (is_tuple) { - arg = PyTuple_GET_ITEM(item, iparam); - } - else { - assert(iparam == 0); - arg = item; + arg = argitems[iparam]; + Py_INCREF(arg); + } + else { + arg = subs_tvars(arg, alias->parameters, argitems); + if (arg == NULL) { + Py_DECREF(newargs); + return NULL; } } - Py_INCREF(arg); PyTuple_SET_ITEM(newargs, iarg, arg); } + PyObject *res = Py_GenericAlias(alias->origin, newargs); + Py_DECREF(newargs); return res; } From webhook-mailer at python.org Mon May 4 04:13:34 2020 From: webhook-mailer at python.org (Shantanu) Date: Mon, 04 May 2020 08:13:34 -0000 Subject: [Python-checkins] bpo-40491: Fix typo in syntax error for numeric literals (GH-19893) Message-ID: https://github.com/python/cpython/commit/c3f001461d5794c81cf5f70e08ae5435fe935ceb commit: c3f001461d5794c81cf5f70e08ae5435fe935ceb branch: master author: Shantanu committer: GitHub date: 2020-05-04T11:13:30+03:00 summary: bpo-40491: Fix typo in syntax error for numeric literals (GH-19893) files: M Parser/pegen/pegen.c diff --git a/Parser/pegen/pegen.c 
b/Parser/pegen/pegen.c index 9858f71c83c79..391f9b91eab90 100644 --- a/Parser/pegen/pegen.c +++ b/Parser/pegen/pegen.c @@ -907,7 +907,7 @@ _PyPegen_number_token(Parser *p) if (p->feature_version < 6 && strchr(num_raw, '_') != NULL) { p->error_indicator = 1; - return RAISE_SYNTAX_ERROR("Underscores in numeric literals are only supported" + return RAISE_SYNTAX_ERROR("Underscores in numeric literals are only supported " "in Python 3.6 and greater"); } From webhook-mailer at python.org Mon May 4 06:58:43 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Mon, 04 May 2020 10:58:43 -0000 Subject: [Python-checkins] bpo-40334: Specialized error message for invalid args after bare '*' (GH-19865) Message-ID: https://github.com/python/cpython/commit/e10e7c771bf06112c4a311e0ef6b8af6423b0cca commit: e10e7c771bf06112c4a311e0ef6b8af6423b0cca branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-05-04T11:58:31+01:00 summary: bpo-40334: Specialized error message for invalid args after bare '*' (GH-19865) When parsing things like `def f(*): pass` the old parser used to output `SyntaxError: named arguments must follow bare *`, which the new parser wasn't able to do. 
files: M Grammar/python.gram M Lib/test/test_exceptions.py M Lib/test/test_peg_parser.py M Lib/test/test_syntax.py M Parser/pegen/parse.c diff --git a/Grammar/python.gram b/Grammar/python.gram index 8e494905cea32..0ce6ab4b4ba90 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -249,6 +249,7 @@ star_etc[StarEtc*]: | '*' ',' b=param_maybe_default+ c=[kwds] { _PyPegen_star_etc(p, NULL, b, c) } | a=kwds { _PyPegen_star_etc(p, NULL, NULL, a) } + | invalid_star_etc kwds[arg_ty]: '**' a=param_no_default { a } @@ -356,6 +357,7 @@ lambda_star_etc[StarEtc*]: | '*' ',' b=lambda_param_maybe_default+ c=[lambda_kwds] { _PyPegen_star_etc(p, NULL, b, c) } | a=lambda_kwds { _PyPegen_star_etc(p, NULL, NULL, a) } + | invalid_lambda_star_etc lambda_kwds[arg_ty]: '**' a=lambda_param_no_default { a } @@ -636,6 +638,10 @@ invalid_comprehension: invalid_parameters: | param_no_default* (slash_with_default | param_with_default+) param_no_default { RAISE_SYNTAX_ERROR("non-default argument follows default argument") } +invalid_star_etc: + | '*' (')' | ',' (')' | '**')) { RAISE_SYNTAX_ERROR("named arguments must follow bare *") } +invalid_lambda_star_etc: + | '*' (':' | ',' (':' | '**')) { RAISE_SYNTAX_ERROR("named arguments must follow bare *") } invalid_double_type_comments: | TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT { RAISE_SYNTAX_ERROR("Cannot have two type comments on def") } diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py index 354b3f4843718..d83b73ab340c3 100644 --- a/Lib/test/test_exceptions.py +++ b/Lib/test/test_exceptions.py @@ -242,11 +242,11 @@ def baz(): check('from __future__ import doesnt_exist', 1, 1) check('from __future__ import braces', 1, 1) check('x=1\nfrom __future__ import division', 2, 1) + check('def f(*):\n pass', 1, 7 if support.use_old_parser() else 8) @support.skip_if_new_parser("Pegen column offsets might be different") def testSyntaxErrorOffsetCustom(self): self.check('for 1 in []: pass', 1, 5) - self.check('def f(*):\n 
pass', 1, 7) self.check('[*x for x in xs]', 1, 2) self.check('def f():\n x, y: int', 2, 3) self.check('(yield i) = 2', 1, 1) diff --git a/Lib/test/test_peg_parser.py b/Lib/test/test_peg_parser.py index 191494481eb0a..d6939fdbf618a 100644 --- a/Lib/test/test_peg_parser.py +++ b/Lib/test/test_peg_parser.py @@ -603,6 +603,12 @@ def f(): ("1 += 1", "cannot assign to literal"), ("pass\n pass", "unexpected indent"), ("def f():\npass", "expected an indented block"), + ("def f(*): pass", "named arguments must follow bare *"), + ("def f(*,): pass", "named arguments must follow bare *"), + ("def f(*, **a): pass", "named arguments must follow bare *"), + ("lambda *: pass", "named arguments must follow bare *"), + ("lambda *,: pass", "named arguments must follow bare *"), + ("lambda *, **a: pass", "named arguments must follow bare *"), ] GOOD_BUT_FAIL_TEST_CASES = [ diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py index e7468cae7b132..0c0fc48e0d3de 100644 --- a/Lib/test/test_syntax.py +++ b/Lib/test/test_syntax.py @@ -627,9 +627,9 @@ Traceback (most recent call last): SyntaxError: cannot assign to __debug__ - # >>> with (lambda *:0): pass - # Traceback (most recent call last): - # SyntaxError: named arguments must follow bare * + >>> with (lambda *:0): pass + Traceback (most recent call last): + SyntaxError: named arguments must follow bare * Corner-cases that used to crash: diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index 492b5e6f9e2b7..55605d5770f1e 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -215,150 +215,156 @@ static KeywordToken *reserved_keywords[] = { #define invalid_block_type 1144 #define invalid_comprehension_type 1145 #define invalid_parameters_type 1146 -#define invalid_double_type_comments_type 1147 -#define _loop0_1_type 1148 -#define _loop0_2_type 1149 -#define _loop0_4_type 1150 -#define _gather_3_type 1151 -#define _loop0_6_type 1152 -#define _gather_5_type 1153 -#define _loop0_8_type 1154 -#define 
_gather_7_type 1155 -#define _loop0_10_type 1156 -#define _gather_9_type 1157 -#define _loop1_11_type 1158 -#define _loop0_13_type 1159 -#define _gather_12_type 1160 -#define _tmp_14_type 1161 -#define _tmp_15_type 1162 -#define _tmp_16_type 1163 -#define _tmp_17_type 1164 -#define _tmp_18_type 1165 -#define _tmp_19_type 1166 -#define _tmp_20_type 1167 -#define _tmp_21_type 1168 -#define _loop1_22_type 1169 -#define _tmp_23_type 1170 -#define _tmp_24_type 1171 -#define _loop0_26_type 1172 -#define _gather_25_type 1173 -#define _loop0_28_type 1174 -#define _gather_27_type 1175 -#define _tmp_29_type 1176 -#define _loop0_30_type 1177 -#define _loop1_31_type 1178 -#define _loop0_33_type 1179 -#define _gather_32_type 1180 -#define _tmp_34_type 1181 -#define _loop0_36_type 1182 -#define _gather_35_type 1183 -#define _tmp_37_type 1184 -#define _loop0_39_type 1185 -#define _gather_38_type 1186 -#define _loop0_41_type 1187 -#define _gather_40_type 1188 -#define _loop0_43_type 1189 -#define _gather_42_type 1190 -#define _loop0_45_type 1191 -#define _gather_44_type 1192 -#define _tmp_46_type 1193 -#define _loop1_47_type 1194 -#define _tmp_48_type 1195 -#define _tmp_49_type 1196 -#define _tmp_50_type 1197 -#define _tmp_51_type 1198 -#define _tmp_52_type 1199 -#define _loop0_53_type 1200 -#define _loop0_54_type 1201 -#define _loop0_55_type 1202 -#define _loop1_56_type 1203 -#define _loop0_57_type 1204 -#define _loop1_58_type 1205 -#define _loop1_59_type 1206 -#define _loop1_60_type 1207 -#define _loop0_61_type 1208 -#define _loop1_62_type 1209 -#define _loop0_63_type 1210 -#define _loop1_64_type 1211 -#define _loop0_65_type 1212 -#define _loop1_66_type 1213 -#define _loop1_67_type 1214 -#define _tmp_68_type 1215 -#define _loop0_70_type 1216 -#define _gather_69_type 1217 -#define _loop1_71_type 1218 -#define _loop0_73_type 1219 -#define _gather_72_type 1220 -#define _loop1_74_type 1221 -#define _loop0_75_type 1222 -#define _loop0_76_type 1223 -#define _loop0_77_type 1224 
-#define _loop1_78_type 1225 -#define _loop0_79_type 1226 -#define _loop1_80_type 1227 -#define _loop1_81_type 1228 -#define _loop1_82_type 1229 -#define _loop0_83_type 1230 -#define _loop1_84_type 1231 -#define _loop0_85_type 1232 -#define _loop1_86_type 1233 -#define _loop0_87_type 1234 -#define _loop1_88_type 1235 -#define _loop1_89_type 1236 -#define _loop1_90_type 1237 -#define _loop1_91_type 1238 -#define _tmp_92_type 1239 -#define _loop0_94_type 1240 -#define _gather_93_type 1241 -#define _tmp_95_type 1242 -#define _tmp_96_type 1243 -#define _tmp_97_type 1244 -#define _tmp_98_type 1245 -#define _loop1_99_type 1246 -#define _tmp_100_type 1247 -#define _tmp_101_type 1248 -#define _loop0_103_type 1249 -#define _gather_102_type 1250 -#define _loop1_104_type 1251 -#define _loop0_105_type 1252 -#define _loop0_106_type 1253 -#define _tmp_107_type 1254 -#define _tmp_108_type 1255 -#define _loop0_110_type 1256 -#define _gather_109_type 1257 -#define _loop0_112_type 1258 -#define _gather_111_type 1259 -#define _loop0_114_type 1260 -#define _gather_113_type 1261 -#define _loop0_116_type 1262 -#define _gather_115_type 1263 -#define _loop0_117_type 1264 -#define _loop0_119_type 1265 -#define _gather_118_type 1266 -#define _tmp_120_type 1267 -#define _loop0_122_type 1268 -#define _gather_121_type 1269 -#define _loop0_124_type 1270 -#define _gather_123_type 1271 -#define _tmp_125_type 1272 -#define _tmp_126_type 1273 -#define _tmp_127_type 1274 -#define _tmp_128_type 1275 -#define _tmp_129_type 1276 -#define _loop0_130_type 1277 -#define _tmp_131_type 1278 -#define _tmp_132_type 1279 -#define _tmp_133_type 1280 -#define _tmp_134_type 1281 -#define _tmp_135_type 1282 -#define _tmp_136_type 1283 -#define _tmp_137_type 1284 -#define _tmp_138_type 1285 -#define _tmp_139_type 1286 -#define _tmp_140_type 1287 -#define _tmp_141_type 1288 -#define _tmp_142_type 1289 -#define _loop1_143_type 1290 +#define invalid_star_etc_type 1147 +#define invalid_lambda_star_etc_type 1148 
+#define invalid_double_type_comments_type 1149 +#define _loop0_1_type 1150 +#define _loop0_2_type 1151 +#define _loop0_4_type 1152 +#define _gather_3_type 1153 +#define _loop0_6_type 1154 +#define _gather_5_type 1155 +#define _loop0_8_type 1156 +#define _gather_7_type 1157 +#define _loop0_10_type 1158 +#define _gather_9_type 1159 +#define _loop1_11_type 1160 +#define _loop0_13_type 1161 +#define _gather_12_type 1162 +#define _tmp_14_type 1163 +#define _tmp_15_type 1164 +#define _tmp_16_type 1165 +#define _tmp_17_type 1166 +#define _tmp_18_type 1167 +#define _tmp_19_type 1168 +#define _tmp_20_type 1169 +#define _tmp_21_type 1170 +#define _loop1_22_type 1171 +#define _tmp_23_type 1172 +#define _tmp_24_type 1173 +#define _loop0_26_type 1174 +#define _gather_25_type 1175 +#define _loop0_28_type 1176 +#define _gather_27_type 1177 +#define _tmp_29_type 1178 +#define _loop0_30_type 1179 +#define _loop1_31_type 1180 +#define _loop0_33_type 1181 +#define _gather_32_type 1182 +#define _tmp_34_type 1183 +#define _loop0_36_type 1184 +#define _gather_35_type 1185 +#define _tmp_37_type 1186 +#define _loop0_39_type 1187 +#define _gather_38_type 1188 +#define _loop0_41_type 1189 +#define _gather_40_type 1190 +#define _loop0_43_type 1191 +#define _gather_42_type 1192 +#define _loop0_45_type 1193 +#define _gather_44_type 1194 +#define _tmp_46_type 1195 +#define _loop1_47_type 1196 +#define _tmp_48_type 1197 +#define _tmp_49_type 1198 +#define _tmp_50_type 1199 +#define _tmp_51_type 1200 +#define _tmp_52_type 1201 +#define _loop0_53_type 1202 +#define _loop0_54_type 1203 +#define _loop0_55_type 1204 +#define _loop1_56_type 1205 +#define _loop0_57_type 1206 +#define _loop1_58_type 1207 +#define _loop1_59_type 1208 +#define _loop1_60_type 1209 +#define _loop0_61_type 1210 +#define _loop1_62_type 1211 +#define _loop0_63_type 1212 +#define _loop1_64_type 1213 +#define _loop0_65_type 1214 +#define _loop1_66_type 1215 +#define _loop1_67_type 1216 +#define _tmp_68_type 1217 +#define 
_loop0_70_type 1218 +#define _gather_69_type 1219 +#define _loop1_71_type 1220 +#define _loop0_73_type 1221 +#define _gather_72_type 1222 +#define _loop1_74_type 1223 +#define _loop0_75_type 1224 +#define _loop0_76_type 1225 +#define _loop0_77_type 1226 +#define _loop1_78_type 1227 +#define _loop0_79_type 1228 +#define _loop1_80_type 1229 +#define _loop1_81_type 1230 +#define _loop1_82_type 1231 +#define _loop0_83_type 1232 +#define _loop1_84_type 1233 +#define _loop0_85_type 1234 +#define _loop1_86_type 1235 +#define _loop0_87_type 1236 +#define _loop1_88_type 1237 +#define _loop1_89_type 1238 +#define _loop1_90_type 1239 +#define _loop1_91_type 1240 +#define _tmp_92_type 1241 +#define _loop0_94_type 1242 +#define _gather_93_type 1243 +#define _tmp_95_type 1244 +#define _tmp_96_type 1245 +#define _tmp_97_type 1246 +#define _tmp_98_type 1247 +#define _loop1_99_type 1248 +#define _tmp_100_type 1249 +#define _tmp_101_type 1250 +#define _loop0_103_type 1251 +#define _gather_102_type 1252 +#define _loop1_104_type 1253 +#define _loop0_105_type 1254 +#define _loop0_106_type 1255 +#define _tmp_107_type 1256 +#define _tmp_108_type 1257 +#define _loop0_110_type 1258 +#define _gather_109_type 1259 +#define _loop0_112_type 1260 +#define _gather_111_type 1261 +#define _loop0_114_type 1262 +#define _gather_113_type 1263 +#define _loop0_116_type 1264 +#define _gather_115_type 1265 +#define _loop0_117_type 1266 +#define _loop0_119_type 1267 +#define _gather_118_type 1268 +#define _tmp_120_type 1269 +#define _loop0_122_type 1270 +#define _gather_121_type 1271 +#define _loop0_124_type 1272 +#define _gather_123_type 1273 +#define _tmp_125_type 1274 +#define _tmp_126_type 1275 +#define _tmp_127_type 1276 +#define _tmp_128_type 1277 +#define _tmp_129_type 1278 +#define _loop0_130_type 1279 +#define _tmp_131_type 1280 +#define _tmp_132_type 1281 +#define _tmp_133_type 1282 +#define _tmp_134_type 1283 +#define _tmp_135_type 1284 +#define _tmp_136_type 1285 +#define _tmp_137_type 1286 
+#define _tmp_138_type 1287 +#define _tmp_139_type 1288 +#define _tmp_140_type 1289 +#define _tmp_141_type 1290 +#define _tmp_142_type 1291 +#define _tmp_143_type 1292 +#define _tmp_144_type 1293 +#define _loop1_145_type 1294 +#define _tmp_146_type 1295 +#define _tmp_147_type 1296 static mod_ty file_rule(Parser *p); static mod_ty interactive_rule(Parser *p); @@ -507,6 +513,8 @@ static void *invalid_assignment_rule(Parser *p); static void *invalid_block_rule(Parser *p); static void *invalid_comprehension_rule(Parser *p); static void *invalid_parameters_rule(Parser *p); +static void *invalid_star_etc_rule(Parser *p); +static void *invalid_lambda_star_etc_rule(Parser *p); static void *invalid_double_type_comments_rule(Parser *p); static asdl_seq *_loop0_1_rule(Parser *p); static asdl_seq *_loop0_2_rule(Parser *p); @@ -650,7 +658,11 @@ static void *_tmp_139_rule(Parser *p); static void *_tmp_140_rule(Parser *p); static void *_tmp_141_rule(Parser *p); static void *_tmp_142_rule(Parser *p); -static asdl_seq *_loop1_143_rule(Parser *p); +static void *_tmp_143_rule(Parser *p); +static void *_tmp_144_rule(Parser *p); +static asdl_seq *_loop1_145_rule(Parser *p); +static void *_tmp_146_rule(Parser *p); +static void *_tmp_147_rule(Parser *p); // file: statements? $ @@ -4064,6 +4076,7 @@ slash_with_default_rule(Parser *p) // | '*' param_no_default param_maybe_default* kwds? // | '*' ',' param_maybe_default+ kwds? // | kwds +// | invalid_star_etc static StarEtc* star_etc_rule(Parser *p) { @@ -4135,6 +4148,17 @@ star_etc_rule(Parser *p) } p->mark = mark; } + { // invalid_star_etc + void *invalid_star_etc_var; + if ( + (invalid_star_etc_var = invalid_star_etc_rule(p)) + ) + { + res = invalid_star_etc_var; + goto done; + } + p->mark = mark; + } res = NULL; done: return res; @@ -5512,6 +5536,7 @@ lambda_slash_with_default_rule(Parser *p) // | '*' lambda_param_no_default lambda_param_maybe_default* lambda_kwds? // | '*' ',' lambda_param_maybe_default+ lambda_kwds? 
// | lambda_kwds +// | invalid_lambda_star_etc static StarEtc* lambda_star_etc_rule(Parser *p) { @@ -5583,6 +5608,17 @@ lambda_star_etc_rule(Parser *p) } p->mark = mark; } + { // invalid_lambda_star_etc + void *invalid_lambda_star_etc_var; + if ( + (invalid_lambda_star_etc_var = invalid_lambda_star_etc_rule(p)) + ) + { + res = invalid_lambda_star_etc_var; + goto done; + } + p->mark = mark; + } res = NULL; done: return res; @@ -10757,6 +10793,70 @@ invalid_parameters_rule(Parser *p) return res; } +// invalid_star_etc: '*' (')' | ',' (')' | '**')) +static void * +invalid_star_etc_rule(Parser *p) +{ + if (p->error_indicator) { + return NULL; + } + void * res = NULL; + int mark = p->mark; + { // '*' (')' | ',' (')' | '**')) + void *_tmp_132_var; + Token * literal; + if ( + (literal = _PyPegen_expect_token(p, 16)) + && + (_tmp_132_var = _tmp_132_rule(p)) + ) + { + res = RAISE_SYNTAX_ERROR ( "named arguments must follow bare *" ); + if (res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + return NULL; + } + goto done; + } + p->mark = mark; + } + res = NULL; + done: + return res; +} + +// invalid_lambda_star_etc: '*' (':' | ',' (':' | '**')) +static void * +invalid_lambda_star_etc_rule(Parser *p) +{ + if (p->error_indicator) { + return NULL; + } + void * res = NULL; + int mark = p->mark; + { // '*' (':' | ',' (':' | '**')) + void *_tmp_133_var; + Token * literal; + if ( + (literal = _PyPegen_expect_token(p, 16)) + && + (_tmp_133_var = _tmp_133_rule(p)) + ) + { + res = RAISE_SYNTAX_ERROR ( "named arguments must follow bare *" ); + if (res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + return NULL; + } + goto done; + } + p->mark = mark; + } + res = NULL; + done: + return res; +} + // invalid_double_type_comments: TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT static void * invalid_double_type_comments_rule(Parser *p) @@ -11693,12 +11793,12 @@ _loop1_22_rule(Parser *p) ssize_t children_capacity = 1; ssize_t n = 0; { // (star_targets '=') - void 
*_tmp_132_var; + void *_tmp_134_var; while ( - (_tmp_132_var = _tmp_132_rule(p)) + (_tmp_134_var = _tmp_134_rule(p)) ) { - res = _tmp_132_var; + res = _tmp_134_var; if (n == children_capacity) { children_capacity *= 2; children = PyMem_Realloc(children, children_capacity*sizeof(void *)); @@ -12020,12 +12120,12 @@ _loop0_30_rule(Parser *p) ssize_t children_capacity = 1; ssize_t n = 0; { // ('.' | '...') - void *_tmp_133_var; + void *_tmp_135_var; while ( - (_tmp_133_var = _tmp_133_rule(p)) + (_tmp_135_var = _tmp_135_rule(p)) ) { - res = _tmp_133_var; + res = _tmp_135_var; if (n == children_capacity) { children_capacity *= 2; children = PyMem_Realloc(children, children_capacity*sizeof(void *)); @@ -12069,12 +12169,12 @@ _loop1_31_rule(Parser *p) ssize_t children_capacity = 1; ssize_t n = 0; { // ('.' | '...') - void *_tmp_134_var; + void *_tmp_136_var; while ( - (_tmp_134_var = _tmp_134_rule(p)) + (_tmp_136_var = _tmp_136_rule(p)) ) { - res = _tmp_134_var; + res = _tmp_136_var; if (n == children_capacity) { children_capacity *= 2; children = PyMem_Realloc(children, children_capacity*sizeof(void *)); @@ -13651,12 +13751,12 @@ _loop1_67_rule(Parser *p) ssize_t children_capacity = 1; ssize_t n = 0; { // ('@' named_expression NEWLINE) - void *_tmp_135_var; + void *_tmp_137_var; while ( - (_tmp_135_var = _tmp_135_rule(p)) + (_tmp_137_var = _tmp_137_rule(p)) ) { - res = _tmp_135_var; + res = _tmp_137_var; if (n == children_capacity) { children_capacity *= 2; children = PyMem_Realloc(children, children_capacity*sizeof(void *)); @@ -13824,12 +13924,12 @@ _loop1_71_rule(Parser *p) ssize_t children_capacity = 1; ssize_t n = 0; { // (',' star_expression) - void *_tmp_136_var; + void *_tmp_138_var; while ( - (_tmp_136_var = _tmp_136_rule(p)) + (_tmp_138_var = _tmp_138_rule(p)) ) { - res = _tmp_136_var; + res = _tmp_138_var; if (n == children_capacity) { children_capacity *= 2; children = PyMem_Realloc(children, children_capacity*sizeof(void *)); @@ -13962,12 +14062,12 @@ 
_loop1_74_rule(Parser *p) ssize_t children_capacity = 1; ssize_t n = 0; { // (',' expression) - void *_tmp_137_var; + void *_tmp_139_var; while ( - (_tmp_137_var = _tmp_137_rule(p)) + (_tmp_139_var = _tmp_139_rule(p)) ) { - res = _tmp_137_var; + res = _tmp_139_var; if (n == children_capacity) { children_capacity *= 2; children = PyMem_Realloc(children, children_capacity*sizeof(void *)); @@ -14729,12 +14829,12 @@ _loop1_89_rule(Parser *p) ssize_t children_capacity = 1; ssize_t n = 0; { // ('or' conjunction) - void *_tmp_138_var; + void *_tmp_140_var; while ( - (_tmp_138_var = _tmp_138_rule(p)) + (_tmp_140_var = _tmp_140_rule(p)) ) { - res = _tmp_138_var; + res = _tmp_140_var; if (n == children_capacity) { children_capacity *= 2; children = PyMem_Realloc(children, children_capacity*sizeof(void *)); @@ -14782,12 +14882,12 @@ _loop1_90_rule(Parser *p) ssize_t children_capacity = 1; ssize_t n = 0; { // ('and' inversion) - void *_tmp_139_var; + void *_tmp_141_var; while ( - (_tmp_139_var = _tmp_139_rule(p)) + (_tmp_141_var = _tmp_141_rule(p)) ) { - res = _tmp_139_var; + res = _tmp_141_var; if (n == children_capacity) { children_capacity *= 2; children = PyMem_Realloc(children, children_capacity*sizeof(void *)); @@ -15437,12 +15537,12 @@ _loop0_105_rule(Parser *p) ssize_t children_capacity = 1; ssize_t n = 0; { // ('if' disjunction) - void *_tmp_140_var; + void *_tmp_142_var; while ( - (_tmp_140_var = _tmp_140_rule(p)) + (_tmp_142_var = _tmp_142_rule(p)) ) { - res = _tmp_140_var; + res = _tmp_142_var; if (n == children_capacity) { children_capacity *= 2; children = PyMem_Realloc(children, children_capacity*sizeof(void *)); @@ -15486,12 +15586,12 @@ _loop0_106_rule(Parser *p) ssize_t children_capacity = 1; ssize_t n = 0; { // ('if' disjunction) - void *_tmp_141_var; + void *_tmp_143_var; while ( - (_tmp_141_var = _tmp_141_rule(p)) + (_tmp_143_var = _tmp_143_rule(p)) ) { - res = _tmp_141_var; + res = _tmp_143_var; if (n == children_capacity) { children_capacity *= 2; 
children = PyMem_Realloc(children, children_capacity*sizeof(void *)); @@ -15939,12 +16039,12 @@ _loop0_117_rule(Parser *p) ssize_t children_capacity = 1; ssize_t n = 0; { // (',' star_target) - void *_tmp_142_var; + void *_tmp_144_var; while ( - (_tmp_142_var = _tmp_142_rule(p)) + (_tmp_144_var = _tmp_144_rule(p)) ) { - res = _tmp_142_var; + res = _tmp_144_var; if (n == children_capacity) { children_capacity *= 2; children = PyMem_Realloc(children, children_capacity*sizeof(void *)); @@ -16508,12 +16608,12 @@ _tmp_131_rule(Parser *p) p->mark = mark; } { // param_with_default+ - asdl_seq * _loop1_143_var; + asdl_seq * _loop1_145_var; if ( - (_loop1_143_var = _loop1_143_rule(p)) + (_loop1_145_var = _loop1_145_rule(p)) ) { - res = _loop1_143_var; + res = _loop1_145_var; goto done; } p->mark = mark; @@ -16523,9 +16623,87 @@ _tmp_131_rule(Parser *p) return res; } -// _tmp_132: star_targets '=' +// _tmp_132: ')' | ',' (')' | '**') static void * _tmp_132_rule(Parser *p) +{ + if (p->error_indicator) { + return NULL; + } + void * res = NULL; + int mark = p->mark; + { // ')' + Token * literal; + if ( + (literal = _PyPegen_expect_token(p, 8)) + ) + { + res = literal; + goto done; + } + p->mark = mark; + } + { // ',' (')' | '**') + void *_tmp_146_var; + Token * literal; + if ( + (literal = _PyPegen_expect_token(p, 12)) + && + (_tmp_146_var = _tmp_146_rule(p)) + ) + { + res = _PyPegen_dummy_name(p, literal, _tmp_146_var); + goto done; + } + p->mark = mark; + } + res = NULL; + done: + return res; +} + +// _tmp_133: ':' | ',' (':' | '**') +static void * +_tmp_133_rule(Parser *p) +{ + if (p->error_indicator) { + return NULL; + } + void * res = NULL; + int mark = p->mark; + { // ':' + Token * literal; + if ( + (literal = _PyPegen_expect_token(p, 11)) + ) + { + res = literal; + goto done; + } + p->mark = mark; + } + { // ',' (':' | '**') + void *_tmp_147_var; + Token * literal; + if ( + (literal = _PyPegen_expect_token(p, 12)) + && + (_tmp_147_var = _tmp_147_rule(p)) + ) + { + res = 
_PyPegen_dummy_name(p, literal, _tmp_147_var); + goto done; + } + p->mark = mark; + } + res = NULL; + done: + return res; +} + +// _tmp_134: star_targets '=' +static void * +_tmp_134_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16555,9 +16733,9 @@ _tmp_132_rule(Parser *p) return res; } -// _tmp_133: '.' | '...' +// _tmp_135: '.' | '...' static void * -_tmp_133_rule(Parser *p) +_tmp_135_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16591,9 +16769,9 @@ _tmp_133_rule(Parser *p) return res; } -// _tmp_134: '.' | '...' +// _tmp_136: '.' | '...' static void * -_tmp_134_rule(Parser *p) +_tmp_136_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16627,9 +16805,9 @@ _tmp_134_rule(Parser *p) return res; } -// _tmp_135: '@' named_expression NEWLINE +// _tmp_137: '@' named_expression NEWLINE static void * -_tmp_135_rule(Parser *p) +_tmp_137_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16662,9 +16840,9 @@ _tmp_135_rule(Parser *p) return res; } -// _tmp_136: ',' star_expression +// _tmp_138: ',' star_expression static void * -_tmp_136_rule(Parser *p) +_tmp_138_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16694,9 +16872,9 @@ _tmp_136_rule(Parser *p) return res; } -// _tmp_137: ',' expression +// _tmp_139: ',' expression static void * -_tmp_137_rule(Parser *p) +_tmp_139_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16726,9 +16904,9 @@ _tmp_137_rule(Parser *p) return res; } -// _tmp_138: 'or' conjunction +// _tmp_140: 'or' conjunction static void * -_tmp_138_rule(Parser *p) +_tmp_140_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16758,9 +16936,9 @@ _tmp_138_rule(Parser *p) return res; } -// _tmp_139: 'and' inversion +// _tmp_141: 'and' inversion static void * -_tmp_139_rule(Parser *p) +_tmp_141_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16790,9 +16968,9 @@ _tmp_139_rule(Parser *p) return res; } -// _tmp_140: 'if' disjunction +// _tmp_142: 'if' disjunction static 
void * -_tmp_140_rule(Parser *p) +_tmp_142_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16822,9 +17000,9 @@ _tmp_140_rule(Parser *p) return res; } -// _tmp_141: 'if' disjunction +// _tmp_143: 'if' disjunction static void * -_tmp_141_rule(Parser *p) +_tmp_143_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16854,9 +17032,9 @@ _tmp_141_rule(Parser *p) return res; } -// _tmp_142: ',' star_target +// _tmp_144: ',' star_target static void * -_tmp_142_rule(Parser *p) +_tmp_144_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16886,9 +17064,9 @@ _tmp_142_rule(Parser *p) return res; } -// _loop1_143: param_with_default +// _loop1_145: param_with_default static asdl_seq * -_loop1_143_rule(Parser *p) +_loop1_145_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16929,16 +17107,88 @@ _loop1_143_rule(Parser *p) } asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); if (!seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_143"); + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_145"); PyMem_Free(children); return NULL; } for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_143_type, seq); + _PyPegen_insert_memo(p, start_mark, _loop1_145_type, seq); return seq; } +// _tmp_146: ')' | '**' +static void * +_tmp_146_rule(Parser *p) +{ + if (p->error_indicator) { + return NULL; + } + void * res = NULL; + int mark = p->mark; + { // ')' + Token * literal; + if ( + (literal = _PyPegen_expect_token(p, 8)) + ) + { + res = literal; + goto done; + } + p->mark = mark; + } + { // '**' + Token * literal; + if ( + (literal = _PyPegen_expect_token(p, 35)) + ) + { + res = literal; + goto done; + } + p->mark = mark; + } + res = NULL; + done: + return res; +} + +// _tmp_147: ':' | '**' +static void * +_tmp_147_rule(Parser *p) +{ + if (p->error_indicator) { + return NULL; + } + void * res = NULL; + int mark = p->mark; + { // ':' + Token * literal; + if ( + 
(literal = _PyPegen_expect_token(p, 11)) + ) + { + res = literal; + goto done; + } + p->mark = mark; + } + { // '**' + Token * literal; + if ( + (literal = _PyPegen_expect_token(p, 35)) + ) + { + res = literal; + goto done; + } + p->mark = mark; + } + res = NULL; + done: + return res; +} + void * _PyPegen_parse(Parser *p) { From webhook-mailer at python.org Mon May 4 07:32:41 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Mon, 04 May 2020 11:32:41 -0000 Subject: [Python-checkins] bpo-40246: Revert reporting of invalid string prefixes (GH-19888) Message-ID: https://github.com/python/cpython/commit/846d8b28ab9bb6197ee81372820311c0abe509c0 commit: 846d8b28ab9bb6197ee81372820311c0abe509c0 branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-05-04T12:32:18+01:00 summary: bpo-40246: Revert reporting of invalid string prefixes (GH-19888) Due to backwards compatibility concerns regarding keywords immediately followed by a string without whitespace between them (like in `bg="#d00" if clear else"#fca"`) will fail to parse, commit 41d5b94af44e34ac05d4cd57460ed104ccf96628 has to be reverted. 
files: A Misc/NEWS.d/next/Core and Builtins/2020-05-03-23-28-11.bpo-40246.c1D7x8.rst M Include/errcode.h M Lib/test/test_fstring.py M Parser/pegen/pegen.c M Parser/tokenizer.c M Python/pythonrun.c diff --git a/Include/errcode.h b/Include/errcode.h index 9af8d5c03d59b..b37cd261d5ec4 100644 --- a/Include/errcode.h +++ b/Include/errcode.h @@ -31,7 +31,6 @@ extern "C" { #define E_LINECONT 25 /* Unexpected characters after a line continuation */ #define E_IDENTIFIER 26 /* Invalid characters in identifier */ #define E_BADSINGLE 27 /* Ill-formed single statement input */ -#define E_BADPREFIX 28 /* Bad string prefixes */ #ifdef __cplusplus } diff --git a/Lib/test/test_fstring.py b/Lib/test/test_fstring.py index fe465b7e1d43d..ac5aa9a76efe7 100644 --- a/Lib/test/test_fstring.py +++ b/Lib/test/test_fstring.py @@ -864,7 +864,7 @@ def test_invalid_string_prefixes(self): "Bf''", "BF''",] double_quote_cases = [case.replace("'", '"') for case in single_quote_cases] - self.assertAllRaise(SyntaxError, 'invalid string prefix', + self.assertAllRaise(SyntaxError, 'unexpected EOF while parsing', single_quote_cases + double_quote_cases) def test_leading_trailing_spaces(self): diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-03-23-28-11.bpo-40246.c1D7x8.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-03-23-28-11.bpo-40246.c1D7x8.rst new file mode 100644 index 0000000000000..62cd632ffd070 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-03-23-28-11.bpo-40246.c1D7x8.rst @@ -0,0 +1 @@ +Reporting a specialised error message for invalid string prefixes, which was introduced in :issue:`40246`, is being reverted due to backwards compatibility concerns for strings that immediately follow a reserved keyword without whitespace between them. Constructs like `bg="#d00" if clear else"#fca"` were failing to parse, which is not an acceptable breakage on such short notice. 
diff --git a/Parser/pegen/pegen.c b/Parser/pegen/pegen.c index 391f9b91eab90..c311593af70f5 100644 --- a/Parser/pegen/pegen.c +++ b/Parser/pegen/pegen.c @@ -334,9 +334,6 @@ tokenizer_error(Parser *p) case E_IDENTIFIER: msg = "invalid character in identifier"; break; - case E_BADPREFIX: - RAISE_SYNTAX_ERROR("invalid string prefix"); - return -1; case E_EOFS: RAISE_SYNTAX_ERROR("EOF while scanning triple-quoted string literal"); return -1; diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c index 95dfc5388037d..0f2b6af5e50ad 100644 --- a/Parser/tokenizer.c +++ b/Parser/tokenizer.c @@ -1396,10 +1396,6 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) *p_start = tok->start; *p_end = tok->cur; - if (c == '"' || c == '\'') { - tok->done = E_BADPREFIX; - return ERRORTOKEN; - } /* async/await parsing block. */ if (tok->cur - tok->start == 5 && tok->start[0] == 'a') { /* May be an 'async' or 'await' token. For Python 3.7 or diff --git a/Python/pythonrun.c b/Python/pythonrun.c index 79147e430a1ad..1b79a33c814da 100644 --- a/Python/pythonrun.c +++ b/Python/pythonrun.c @@ -1609,9 +1609,6 @@ err_input(perrdetail *err) case E_BADSINGLE: msg = "multiple statements found while compiling a single statement"; break; - case E_BADPREFIX: - msg = "invalid string prefix"; - break; default: fprintf(stderr, "error=%d\n", err->error); msg = "unknown parsing error"; From webhook-mailer at python.org Mon May 4 09:32:02 2020 From: webhook-mailer at python.org (Hai Shi) Date: Mon, 04 May 2020 13:32:02 -0000 Subject: [Python-checkins] bpo-39573: Use Py_IS_TYPE to check for types (GH-19882) Message-ID: https://github.com/python/cpython/commit/5e8ffe147710e449c2e935a4e2ff5cbd19828a8a commit: 5e8ffe147710e449c2e935a4e2ff5cbd19828a8a branch: master author: Hai Shi committer: GitHub date: 2020-05-04T22:31:38+09:00 summary: bpo-39573: Use Py_IS_TYPE to check for types (GH-19882) files: M Objects/genericaliasobject.c M Objects/typeobject.c diff --git 
a/Objects/genericaliasobject.c b/Objects/genericaliasobject.c index c06d79c556190..4d511a239063c 100644 --- a/Objects/genericaliasobject.c +++ b/Objects/genericaliasobject.c @@ -424,8 +424,8 @@ ga_getattro(PyObject *self, PyObject *name) static PyObject * ga_richcompare(PyObject *a, PyObject *b, int op) { - if (Py_TYPE(a) != &Py_GenericAliasType || - Py_TYPE(b) != &Py_GenericAliasType || + if (!Py_IS_TYPE(a, &Py_GenericAliasType) || + !Py_IS_TYPE(b, &Py_GenericAliasType) || (op != Py_EQ && op != Py_NE)) { Py_RETURN_NOTIMPLEMENTED; diff --git a/Objects/typeobject.c b/Objects/typeobject.c index c2ddc162ac82c..db0ae970090ba 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -6306,7 +6306,7 @@ FUNCNAME(PyObject *self, PyObject *other) \ stack[1] = other; \ r = vectorcall_maybe(tstate, &op_id, stack, 2); \ if (r != Py_NotImplemented || \ - Py_TYPE(other) == Py_TYPE(self)) \ + Py_IS_TYPE(other, Py_TYPE(self))) \ return r; \ Py_DECREF(r); \ } \ From webhook-mailer at python.org Mon May 4 09:32:50 2020 From: webhook-mailer at python.org (Dong-hee Na) Date: Mon, 04 May 2020 13:32:50 -0000 Subject: [Python-checkins] bpo-40455: Remove gcc10 warning about x_digits (#19852) Message-ID: https://github.com/python/cpython/commit/b88cd585d36d6285a5aeb0b6fdb70c134062181e commit: b88cd585d36d6285a5aeb0b6fdb70c134062181e branch: master author: Dong-hee Na committer: GitHub date: 2020-05-04T22:32:42+09:00 summary: bpo-40455: Remove gcc10 warning about x_digits (#19852) * bpo-40455: Remove gcc10 warning about x_digits * bpo-40455: nit * bpo-40455: fix logic error files: M Objects/longobject.c diff --git a/Objects/longobject.c b/Objects/longobject.c index a0bb6bc52be02..11fc75b918f77 100644 --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -2852,7 +2852,8 @@ _PyLong_Frexp(PyLongObject *a, Py_ssize_t *e) { Py_ssize_t a_size, a_bits, shift_digits, shift_bits, x_size; /* See below for why x_digits is always large enough. 
*/ - digit rem, x_digits[2 + (DBL_MANT_DIG + 1) / PyLong_SHIFT]; + digit rem; + digit x_digits[2 + (DBL_MANT_DIG + 1) / PyLong_SHIFT] = {0,}; double dx; /* Correction term for round-half-to-even rounding. For a digit x, "x + half_even_correction[x & 7]" gives x rounded to the nearest @@ -2902,9 +2903,7 @@ _PyLong_Frexp(PyLongObject *a, Py_ssize_t *e) if (a_bits <= DBL_MANT_DIG + 2) { shift_digits = (DBL_MANT_DIG + 2 - a_bits) / PyLong_SHIFT; shift_bits = (DBL_MANT_DIG + 2 - a_bits) % PyLong_SHIFT; - x_size = 0; - while (x_size < shift_digits) - x_digits[x_size++] = 0; + x_size = shift_digits; rem = v_lshift(x_digits + x_size, a->ob_digit, a_size, (int)shift_bits); x_size += a_size; From webhook-mailer at python.org Mon May 4 11:06:03 2020 From: webhook-mailer at python.org (Hai Shi) Date: Mon, 04 May 2020 15:06:03 -0000 Subject: [Python-checkins] bpo-40135: Fix multiprocessing test_shared_memory_across_processes() (GH-19892) Message-ID: https://github.com/python/cpython/commit/caa3ef284a2e5e5b9bdd6a9e619804122c842d80 commit: caa3ef284a2e5e5b9bdd6a9e619804122c842d80 branch: master author: Hai Shi committer: GitHub date: 2020-05-04T17:05:54+02:00 summary: bpo-40135: Fix multiprocessing test_shared_memory_across_processes() (GH-19892) Don't define shared memory block's name in test_shared_memory_across_processes(): use SharedMemory(create=True) instead. files: M Lib/test/_test_multiprocessing.py diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py index dd894f21f7afc..dc8164f3288e1 100644 --- a/Lib/test/_test_multiprocessing.py +++ b/Lib/test/_test_multiprocessing.py @@ -3860,7 +3860,9 @@ class OptionalAttachSharedMemory(shared_memory.SharedMemory): sms.close() def test_shared_memory_across_processes(self): - sms = shared_memory.SharedMemory('test02_tsmap', True, size=512) + # bpo-40135: don't define shared memory block's name in case of + # the failure when we run multiprocessing tests in parallel. 
+ sms = shared_memory.SharedMemory(create=True, size=512) self.addCleanup(sms.unlink) # Verify remote attachment to existing block by name is working. From webhook-mailer at python.org Mon May 4 11:25:30 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 04 May 2020 15:25:30 -0000 Subject: [Python-checkins] bpo-40135: Fix multiprocessing test_shared_memory_across_processes() (GH-19892) Message-ID: https://github.com/python/cpython/commit/70fe95cdc9ac1b00d4f86b7525dca80caf7003e1 commit: 70fe95cdc9ac1b00d4f86b7525dca80caf7003e1 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-04T08:25:22-07:00 summary: bpo-40135: Fix multiprocessing test_shared_memory_across_processes() (GH-19892) Don't define shared memory block's name in test_shared_memory_across_processes(): use SharedMemory(create=True) instead. (cherry picked from commit caa3ef284a2e5e5b9bdd6a9e619804122c842d80) Co-authored-by: Hai Shi files: M Lib/test/_test_multiprocessing.py diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py index 5943dd83cc143..ff58481f00314 100644 --- a/Lib/test/_test_multiprocessing.py +++ b/Lib/test/_test_multiprocessing.py @@ -3828,7 +3828,9 @@ class OptionalAttachSharedMemory(shared_memory.SharedMemory): sms.close() def test_shared_memory_across_processes(self): - sms = shared_memory.SharedMemory('test02_tsmap', True, size=512) + # bpo-40135: don't define shared memory block's name in case of + # the failure when we run multiprocessing tests in parallel. + sms = shared_memory.SharedMemory(create=True, size=512) self.addCleanup(sms.unlink) # Verify remote attachment to existing block by name is working. 
From webhook-mailer at python.org Mon May 4 13:30:50 2020 From: webhook-mailer at python.org (Dong-hee Na) Date: Mon, 04 May 2020 17:30:50 -0000 Subject: [Python-checkins] bpo-40489: Add test case for dict contain use after free (GH-19906) Message-ID: https://github.com/python/cpython/commit/785f5e6d674306052bf865677d885c30561985ae commit: 785f5e6d674306052bf865677d885c30561985ae branch: master author: Dong-hee Na committer: GitHub date: 2020-05-05T02:30:42+09:00 summary: bpo-40489: Add test case for dict contain use after free (GH-19906) files: M Lib/test/test_dict.py diff --git a/Lib/test/test_dict.py b/Lib/test/test_dict.py index d5a3d9e894574..6b8596fff6a9f 100644 --- a/Lib/test/test_dict.py +++ b/Lib/test/test_dict.py @@ -1324,6 +1324,19 @@ def __eq__(self, other): d = {0: set()} (0, X()) in d.items() + def test_dict_contain_use_after_free(self): + # bpo-40489 + class S(str): + def __eq__(self, other): + d.clear() + return NotImplemented + + def __hash__(self): + return hash('test') + + d = {S(): 'value'} + self.assertFalse('test' in d) + def test_init_use_after_free(self): class X: def __hash__(self): From webhook-mailer at python.org Mon May 4 14:05:10 2020 From: webhook-mailer at python.org (Hai Shi) Date: Mon, 04 May 2020 18:05:10 -0000 Subject: [Python-checkins] bpo-40275: test.support imports lazily locale import (GH-19761) Message-ID: https://github.com/python/cpython/commit/975408c065b645e7d717546b0d744415abb45cd1 commit: 975408c065b645e7d717546b0d744415abb45cd1 branch: master author: Hai Shi committer: GitHub date: 2020-05-04T20:05:02+02:00 summary: bpo-40275: test.support imports lazily locale import (GH-19761) files: M Lib/test/support/__init__.py diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index bd2157496fe00..d2418282afc11 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -13,7 +13,6 @@ import glob import importlib import importlib.util -import locale import os import platform import re @@ 
-2311,6 +2310,7 @@ def skip_if_buggy_ucrt_strfptime(test): See bpo-37552 [Windows] strptime/strftime return invalid results with UCRT version 17763.615 """ + import locale global _buggy_ucrt if _buggy_ucrt is None: if(sys.platform == 'win32' and From webhook-mailer at python.org Mon May 4 15:47:10 2020 From: webhook-mailer at python.org (Joannah Nanjekye) Date: Mon, 04 May 2020 19:47:10 -0000 Subject: [Python-checkins] bpo-39470: Indicate that ``os.makedirs`` is equivalent to ``Path.mkdir`` (GH-18216) Message-ID: https://github.com/python/cpython/commit/f25fb6ebfec894c01bc927c9aae7924ffc826d11 commit: f25fb6ebfec894c01bc927c9aae7924ffc826d11 branch: master author: Joannah Nanjekye <33177550+nanjekyejoannah at users.noreply.github.com> committer: GitHub date: 2020-05-04T16:47:03-03:00 summary: bpo-39470: Indicate that ``os.makedirs`` is equivalent to ``Path.mkdir`` (GH-18216) * Indicate os.makedirs is equivalent to Path.mkdir * ?? Added by blurb_it. * ignore news feed Co-authored-by: nanjekyejoannah Co-authored-by: blurb-it[bot] <43283697+blurb-it[bot]@users.noreply.github.com> files: M Doc/library/pathlib.rst diff --git a/Doc/library/pathlib.rst b/Doc/library/pathlib.rst index dead49b630dcd..83f7c836f0e71 100644 --- a/Doc/library/pathlib.rst +++ b/Doc/library/pathlib.rst @@ -1185,6 +1185,7 @@ os and os.path pathlib :func:`os.path.abspath` :meth:`Path.resolve` :func:`os.chmod` :meth:`Path.chmod` :func:`os.mkdir` :meth:`Path.mkdir` +:func:`os.makedirs` :meth:`Path.mkdir` :func:`os.rename` :meth:`Path.rename` :func:`os.replace` :meth:`Path.replace` :func:`os.rmdir` :meth:`Path.rmdir` From webhook-mailer at python.org Mon May 4 17:56:08 2020 From: webhook-mailer at python.org (Joel Rosdahl) Date: Mon, 04 May 2020 21:56:08 -0000 Subject: [Python-checkins] bpo-40499: Mention that asyncio.wait() needs a non-empty aws set (GH-19900) Message-ID: https://github.com/python/cpython/commit/9d74658f0a6e8a9b8d6dcf199dda886f35c6ad68 commit: 9d74658f0a6e8a9b8d6dcf199dda886f35c6ad68 
branch: master author: Joel Rosdahl committer: GitHub date: 2020-05-04T14:56:00-07:00 summary: bpo-40499: Mention that asyncio.wait() needs a non-empty aws set (GH-19900) A similar formulation was added in bpo-21596 (db74d982d43d98040e38665d843cbc8de4a082b1) but was lost in bpo-33649 (3faaa8857a42a36383bb18425444e597fc876797). files: A Misc/NEWS.d/next/Documentation/2020-05-04-14-20-02.bpo-40499.tjLSo8.rst M Doc/library/asyncio-task.rst M Misc/ACKS diff --git a/Doc/library/asyncio-task.rst b/Doc/library/asyncio-task.rst index d992b0011dc66..6627bec79823a 100644 --- a/Doc/library/asyncio-task.rst +++ b/Doc/library/asyncio-task.rst @@ -498,6 +498,8 @@ Waiting Primitives set concurrently and block until the condition specified by *return_when*. + The *aws* set must not be empty. + Returns two sets of Tasks/Futures: ``(done, pending)``. Usage:: diff --git a/Misc/ACKS b/Misc/ACKS index 9221f6aae439e..f744de6b1f66d 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -1443,6 +1443,7 @@ Mike Romberg Armin Ronacher Case Roole Timothy Roscoe +Joel Rosdahl Erik Rose Mark Roseman Josh Rosenberg diff --git a/Misc/NEWS.d/next/Documentation/2020-05-04-14-20-02.bpo-40499.tjLSo8.rst b/Misc/NEWS.d/next/Documentation/2020-05-04-14-20-02.bpo-40499.tjLSo8.rst new file mode 100644 index 0000000000000..2b7eccbf0efaf --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2020-05-04-14-20-02.bpo-40499.tjLSo8.rst @@ -0,0 +1 @@ +Mention that :func:`asyncio.wait` requires a non-empty set of awaitables. 
From webhook-mailer at python.org Mon May 4 21:49:53 2020 From: webhook-mailer at python.org (Dong-hee Na) Date: Tue, 05 May 2020 01:49:53 -0000 Subject: [Python-checkins] bpo-1635741: Port syslog module to multiphase initialization (GH-19907) Message-ID: https://github.com/python/cpython/commit/92a98ed97513c6e365ce8765550ea65d0ddc8cd7 commit: 92a98ed97513c6e365ce8765550ea65d0ddc8cd7 branch: master author: Dong-hee Na committer: GitHub date: 2020-05-05T10:49:46+09:00 summary: bpo-1635741: Port syslog module to multiphase initialization (GH-19907) files: A Misc/NEWS.d/next/Core and Builtins/2020-05-05-03-36-27.bpo-1635741.ARv1YV.rst M Modules/syslogmodule.c diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-05-03-36-27.bpo-1635741.ARv1YV.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-05-03-36-27.bpo-1635741.ARv1YV.rst new file mode 100644 index 0000000000000..f484992c487bd --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-05-03-36-27.bpo-1635741.ARv1YV.rst @@ -0,0 +1 @@ +Port :mod:`syslog` to multiphase initialization (:pep:`489`). 
diff --git a/Modules/syslogmodule.c b/Modules/syslogmodule.c index 11718e277432f..cdc94a60a373d 100644 --- a/Modules/syslogmodule.c +++ b/Modules/syslogmodule.c @@ -261,72 +261,55 @@ static PyMethodDef syslog_methods[] = { {NULL, NULL, 0} }; -/* Initialization function for the module */ - - -static struct PyModuleDef syslogmodule = { - PyModuleDef_HEAD_INIT, - "syslog", - NULL, - -1, - syslog_methods, - NULL, - NULL, - NULL, - NULL -}; -PyMODINIT_FUNC -PyInit_syslog(void) +static int +syslog_exec(PyObject *module) { - PyObject *m; - - /* Create the module and add the functions */ - m = PyModule_Create(&syslogmodule); - if (m == NULL) - return NULL; - - /* Add some symbolic constants to the module */ - +#define ADD_INT_MACRO(module, macro) \ + do { \ + if (PyModule_AddIntConstant(module, #macro, macro) < 0) { \ + return -1; \ + } \ + } while (0) /* Priorities */ - PyModule_AddIntMacro(m, LOG_EMERG); - PyModule_AddIntMacro(m, LOG_ALERT); - PyModule_AddIntMacro(m, LOG_CRIT); - PyModule_AddIntMacro(m, LOG_ERR); - PyModule_AddIntMacro(m, LOG_WARNING); - PyModule_AddIntMacro(m, LOG_NOTICE); - PyModule_AddIntMacro(m, LOG_INFO); - PyModule_AddIntMacro(m, LOG_DEBUG); + ADD_INT_MACRO(module, LOG_EMERG); + ADD_INT_MACRO(module, LOG_ALERT); + ADD_INT_MACRO(module, LOG_CRIT); + ADD_INT_MACRO(module, LOG_ERR); + ADD_INT_MACRO(module, LOG_WARNING); + ADD_INT_MACRO(module, LOG_NOTICE); + ADD_INT_MACRO(module, LOG_INFO); + ADD_INT_MACRO(module, LOG_DEBUG); /* openlog() option flags */ - PyModule_AddIntMacro(m, LOG_PID); - PyModule_AddIntMacro(m, LOG_CONS); - PyModule_AddIntMacro(m, LOG_NDELAY); + ADD_INT_MACRO(module, LOG_PID); + ADD_INT_MACRO(module, LOG_CONS); + ADD_INT_MACRO(module, LOG_NDELAY); #ifdef LOG_ODELAY - PyModule_AddIntMacro(m, LOG_ODELAY); + ADD_INT_MACRO(module, LOG_ODELAY); #endif #ifdef LOG_NOWAIT - PyModule_AddIntMacro(m, LOG_NOWAIT); + ADD_INT_MACRO(module, LOG_NOWAIT); #endif #ifdef LOG_PERROR - PyModule_AddIntMacro(m, LOG_PERROR); + ADD_INT_MACRO(module, 
LOG_PERROR); #endif /* Facilities */ - PyModule_AddIntMacro(m, LOG_KERN); - PyModule_AddIntMacro(m, LOG_USER); - PyModule_AddIntMacro(m, LOG_MAIL); - PyModule_AddIntMacro(m, LOG_DAEMON); - PyModule_AddIntMacro(m, LOG_AUTH); - PyModule_AddIntMacro(m, LOG_LPR); - PyModule_AddIntMacro(m, LOG_LOCAL0); - PyModule_AddIntMacro(m, LOG_LOCAL1); - PyModule_AddIntMacro(m, LOG_LOCAL2); - PyModule_AddIntMacro(m, LOG_LOCAL3); - PyModule_AddIntMacro(m, LOG_LOCAL4); - PyModule_AddIntMacro(m, LOG_LOCAL5); - PyModule_AddIntMacro(m, LOG_LOCAL6); - PyModule_AddIntMacro(m, LOG_LOCAL7); + ADD_INT_MACRO(module, LOG_KERN); + ADD_INT_MACRO(module, LOG_USER); + ADD_INT_MACRO(module, LOG_MAIL); + ADD_INT_MACRO(module, LOG_DAEMON); + ADD_INT_MACRO(module, LOG_AUTH); + ADD_INT_MACRO(module, LOG_LPR); + ADD_INT_MACRO(module, LOG_LOCAL0); + ADD_INT_MACRO(module, LOG_LOCAL1); + ADD_INT_MACRO(module, LOG_LOCAL2); + ADD_INT_MACRO(module, LOG_LOCAL3); + ADD_INT_MACRO(module, LOG_LOCAL4); + ADD_INT_MACRO(module, LOG_LOCAL5); + ADD_INT_MACRO(module, LOG_LOCAL6); + ADD_INT_MACRO(module, LOG_LOCAL7); #ifndef LOG_SYSLOG #define LOG_SYSLOG LOG_DAEMON @@ -341,14 +324,35 @@ PyInit_syslog(void) #define LOG_CRON LOG_DAEMON #endif - PyModule_AddIntMacro(m, LOG_SYSLOG); - PyModule_AddIntMacro(m, LOG_CRON); - PyModule_AddIntMacro(m, LOG_UUCP); - PyModule_AddIntMacro(m, LOG_NEWS); + ADD_INT_MACRO(module, LOG_SYSLOG); + ADD_INT_MACRO(module, LOG_CRON); + ADD_INT_MACRO(module, LOG_UUCP); + ADD_INT_MACRO(module, LOG_NEWS); #ifdef LOG_AUTHPRIV - PyModule_AddIntMacro(m, LOG_AUTHPRIV); + ADD_INT_MACRO(module, LOG_AUTHPRIV); #endif - return m; + return 0; } + +static PyModuleDef_Slot syslog_slots[] = { + {Py_mod_exec, syslog_exec}, + {0, NULL} +}; + +/* Initialization function for the module */ + +static struct PyModuleDef syslogmodule = { + PyModuleDef_HEAD_INIT, + .m_name = "syslog", + .m_size = 0, + .m_methods = syslog_methods, + .m_slots = syslog_slots, +}; + +PyMODINIT_FUNC +PyInit_syslog(void) +{ + return 
PyModuleDef_Init(&syslogmodule); +} \ No newline at end of file From webhook-mailer at python.org Mon May 4 22:33:24 2020 From: webhook-mailer at python.org (Dennis Sweeney) Date: Tue, 05 May 2020 02:33:24 -0000 Subject: [Python-checkins] bpo-40459: Fix NameError in platform.py (GH-19855) Message-ID: https://github.com/python/cpython/commit/1e7e4519a8ddc2239101a0146d788c9161143a77 commit: 1e7e4519a8ddc2239101a0146d788c9161143a77 branch: master author: Dennis Sweeney <36520290+sweeneyde at users.noreply.github.com> committer: GitHub date: 2020-05-05T11:33:17+09:00 summary: bpo-40459: Fix NameError in platform.py (GH-19855) files: A Misc/NEWS.d/next/Library/2020-05-02-04-29-31.bpo-40459.fSAYVD.rst M Lib/platform.py diff --git a/Lib/platform.py b/Lib/platform.py index 3f442ef0fbb65..049c2c6ef25a1 100755 --- a/Lib/platform.py +++ b/Lib/platform.py @@ -398,9 +398,9 @@ def win32_ver(release='', version='', csd='', ptype=''): else: try: cvkey = r'SOFTWARE\Microsoft\Windows NT\CurrentVersion' - with winreg.OpenKeyEx(HKEY_LOCAL_MACHINE, cvkey) as key: - ptype = QueryValueEx(key, 'CurrentType')[0] - except: + with winreg.OpenKeyEx(winreg.HKEY_LOCAL_MACHINE, cvkey) as key: + ptype = winreg.QueryValueEx(key, 'CurrentType')[0] + except OSError: pass return release, version, csd, ptype diff --git a/Misc/NEWS.d/next/Library/2020-05-02-04-29-31.bpo-40459.fSAYVD.rst b/Misc/NEWS.d/next/Library/2020-05-02-04-29-31.bpo-40459.fSAYVD.rst new file mode 100644 index 0000000000000..d4bf6987fa260 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-02-04-29-31.bpo-40459.fSAYVD.rst @@ -0,0 +1 @@ +:func:`platform.win32_ver` now produces correct *ptype* strings instead of empty strings. 
From webhook-mailer at python.org Mon May 4 22:51:41 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 05 May 2020 02:51:41 -0000 Subject: [Python-checkins] bpo-40459: Fix NameError in platform.py (GH-19855) Message-ID: https://github.com/python/cpython/commit/8ddf91543890e38c76aa0029482c6f5f5c444837 commit: 8ddf91543890e38c76aa0029482c6f5f5c444837 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-04T19:51:33-07:00 summary: bpo-40459: Fix NameError in platform.py (GH-19855) (cherry picked from commit 1e7e4519a8ddc2239101a0146d788c9161143a77) Co-authored-by: Dennis Sweeney <36520290+sweeneyde at users.noreply.github.com> files: A Misc/NEWS.d/next/Library/2020-05-02-04-29-31.bpo-40459.fSAYVD.rst M Lib/platform.py diff --git a/Lib/platform.py b/Lib/platform.py index 6ab06b58321e0..7af46ffd17728 100755 --- a/Lib/platform.py +++ b/Lib/platform.py @@ -570,9 +570,9 @@ def win32_ver(release='', version='', csd='', ptype=''): else: try: cvkey = r'SOFTWARE\Microsoft\Windows NT\CurrentVersion' - with winreg.OpenKeyEx(HKEY_LOCAL_MACHINE, cvkey) as key: - ptype = QueryValueEx(key, 'CurrentType')[0] - except: + with winreg.OpenKeyEx(winreg.HKEY_LOCAL_MACHINE, cvkey) as key: + ptype = winreg.QueryValueEx(key, 'CurrentType')[0] + except OSError: pass return release, version, csd, ptype diff --git a/Misc/NEWS.d/next/Library/2020-05-02-04-29-31.bpo-40459.fSAYVD.rst b/Misc/NEWS.d/next/Library/2020-05-02-04-29-31.bpo-40459.fSAYVD.rst new file mode 100644 index 0000000000000..d4bf6987fa260 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-02-04-29-31.bpo-40459.fSAYVD.rst @@ -0,0 +1 @@ +:func:`platform.win32_ver` now produces correct *ptype* strings instead of empty strings. 
From webhook-mailer at python.org Mon May 4 22:51:53 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 05 May 2020 02:51:53 -0000 Subject: [Python-checkins] bpo-40459: Fix NameError in platform.py (GH-19855) Message-ID: https://github.com/python/cpython/commit/efc782d29e229924076ffb6645a72f26242fb3ef commit: efc782d29e229924076ffb6645a72f26242fb3ef branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-04T19:51:48-07:00 summary: bpo-40459: Fix NameError in platform.py (GH-19855) (cherry picked from commit 1e7e4519a8ddc2239101a0146d788c9161143a77) Co-authored-by: Dennis Sweeney <36520290+sweeneyde at users.noreply.github.com> files: A Misc/NEWS.d/next/Library/2020-05-02-04-29-31.bpo-40459.fSAYVD.rst M Lib/platform.py diff --git a/Lib/platform.py b/Lib/platform.py index 6fbb7b08c598e..994d892c5e616 100755 --- a/Lib/platform.py +++ b/Lib/platform.py @@ -395,9 +395,9 @@ def win32_ver(release='', version='', csd='', ptype=''): else: try: cvkey = r'SOFTWARE\Microsoft\Windows NT\CurrentVersion' - with winreg.OpenKeyEx(HKEY_LOCAL_MACHINE, cvkey) as key: - ptype = QueryValueEx(key, 'CurrentType')[0] - except: + with winreg.OpenKeyEx(winreg.HKEY_LOCAL_MACHINE, cvkey) as key: + ptype = winreg.QueryValueEx(key, 'CurrentType')[0] + except OSError: pass return release, version, csd, ptype diff --git a/Misc/NEWS.d/next/Library/2020-05-02-04-29-31.bpo-40459.fSAYVD.rst b/Misc/NEWS.d/next/Library/2020-05-02-04-29-31.bpo-40459.fSAYVD.rst new file mode 100644 index 0000000000000..d4bf6987fa260 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-02-04-29-31.bpo-40459.fSAYVD.rst @@ -0,0 +1 @@ +:func:`platform.win32_ver` now produces correct *ptype* strings instead of empty strings. 
From webhook-mailer at python.org Tue May 5 01:52:18 2020 From: webhook-mailer at python.org (Raymond Hettinger) Date: Tue, 05 May 2020 05:52:18 -0000 Subject: [Python-checkins] bpo-40286: Put methods in correct sections. Add security notice to use secrets for session tokens. (GH-19870) Message-ID: https://github.com/python/cpython/commit/f01d1be97d740ea0369379ca305646a26694236e commit: f01d1be97d740ea0369379ca305646a26694236e branch: master author: Raymond Hettinger committer: GitHub date: 2020-05-04T22:52:13-07:00 summary: bpo-40286: Put methods in correct sections. Add security notice to use secrets for session tokens. (GH-19870) files: M Doc/library/random.rst diff --git a/Doc/library/random.rst b/Doc/library/random.rst index ab4ca4b3f8532..f37bc2a111d95 100644 --- a/Doc/library/random.rst +++ b/Doc/library/random.rst @@ -104,21 +104,16 @@ Bookkeeping functions the time :func:`getstate` was called. -.. function:: getrandbits(k) - - Returns a Python integer with *k* random bits. This method is supplied with - the Mersenne Twister generator and some other generators may also provide it - as an optional part of the API. When available, :meth:`getrandbits` enables - :meth:`randrange` to handle arbitrarily large ranges. - - .. versionchanged:: 3.9 - This method now accepts zero for *k*. - +Functions for bytes +------------------- .. function:: randbytes(n) Generate *n* random bytes. + This method should not be used for generating security tokens. + Use :func:`secrets.token_bytes` instead. + .. versionadded:: 3.9 @@ -145,6 +140,16 @@ Functions for integers Return a random integer *N* such that ``a <= N <= b``. Alias for ``randrange(a, b+1)``. +.. function:: getrandbits(k) + + Returns a Python integer with *k* random bits. This method is supplied with + the MersenneTwister generator and some other generators may also provide it + as an optional part of the API. When available, :meth:`getrandbits` enables + :meth:`randrange` to handle arbitrarily large ranges. + + .. 
versionchanged:: 3.9 + This method now accepts zero for *k*. + Functions for sequences ----------------------- From webhook-mailer at python.org Tue May 5 09:43:45 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 05 May 2020 13:43:45 -0000 Subject: [Python-checkins] bpo-40513: Per-interpreter signals pending (GH-19924) Message-ID: https://github.com/python/cpython/commit/4e01946cafca0cf49f796c3118e0d65237bcad69 commit: 4e01946cafca0cf49f796c3118e0d65237bcad69 branch: master author: Victor Stinner committer: GitHub date: 2020-05-05T15:43:37+02:00 summary: bpo-40513: Per-interpreter signals pending (GH-19924) Move signals_pending from _PyRuntime.ceval to PyInterpreterState.ceval. files: M Include/internal/pycore_interp.h M Include/internal/pycore_runtime.h M Python/ceval.c diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index 6e9937caa9dbf..251ee06ed4b00 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -43,6 +43,8 @@ struct _ceval_state { the fast path in the eval loop. */ _Py_atomic_int eval_breaker; struct _pending_calls pending; + /* Request for checking signals. */ + _Py_atomic_int signals_pending; }; diff --git a/Include/internal/pycore_runtime.h b/Include/internal/pycore_runtime.h index 995fe231c3214..d432c6cc5112a 100644 --- a/Include/internal/pycore_runtime.h +++ b/Include/internal/pycore_runtime.h @@ -17,8 +17,6 @@ struct _ceval_runtime_state { int recursion_limit; /* Request for dropping the GIL */ _Py_atomic_int gil_drop_request; - /* Request for checking signals. 
*/ - _Py_atomic_int signals_pending; struct _gil_runtime_state gil; }; diff --git a/Python/ceval.c b/Python/ceval.c index e15d7e0b4603d..addc0264b171a 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -148,7 +148,7 @@ COMPUTE_EVAL_BREAKER(PyInterpreterState *interp, { _Py_atomic_store_relaxed(&ceval2->eval_breaker, _Py_atomic_load_relaxed(&ceval->gil_drop_request) - | (_Py_atomic_load_relaxed(&ceval->signals_pending) + | (_Py_atomic_load_relaxed(&ceval2->signals_pending) && _Py_ThreadCanHandleSignals(interp)) | (_Py_atomic_load_relaxed(&ceval2->pending.calls_to_do) && _Py_ThreadCanHandlePendingCalls()) @@ -201,7 +201,7 @@ SIGNAL_PENDING_SIGNALS(PyInterpreterState *interp) { struct _ceval_runtime_state *ceval = &interp->runtime->ceval; struct _ceval_state *ceval2 = &interp->ceval; - _Py_atomic_store_relaxed(&ceval->signals_pending, 1); + _Py_atomic_store_relaxed(&ceval2->signals_pending, 1); /* eval_breaker is not set to 1 if thread_can_handle_signals() is false */ COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); } @@ -212,7 +212,7 @@ UNSIGNAL_PENDING_SIGNALS(PyInterpreterState *interp) { struct _ceval_runtime_state *ceval = &interp->runtime->ceval; struct _ceval_state *ceval2 = &interp->ceval; - _Py_atomic_store_relaxed(&ceval->signals_pending, 0); + _Py_atomic_store_relaxed(&ceval2->signals_pending, 0); COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); } @@ -830,16 +830,16 @@ eval_frame_handle_pending(PyThreadState *tstate) { _PyRuntimeState * const runtime = &_PyRuntime; struct _ceval_runtime_state *ceval = &runtime->ceval; + struct _ceval_state *ceval2 = &tstate->interp->ceval; /* Pending signals */ - if (_Py_atomic_load_relaxed(&ceval->signals_pending)) { + if (_Py_atomic_load_relaxed(&ceval2->signals_pending)) { if (handle_signals(tstate) != 0) { return -1; } } /* Pending calls */ - struct _ceval_state *ceval2 = &tstate->interp->ceval; if (_Py_atomic_load_relaxed(&ceval2->pending.calls_to_do)) { if (make_pending_calls(tstate) != 0) { return -1; From webhook-mailer at 
python.org Tue May 5 10:14:40 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 05 May 2020 14:14:40 -0000 Subject: [Python-checkins] bpo-40513: Per-interpreter gil_drop_request (GH-19927) Message-ID: https://github.com/python/cpython/commit/0b1e3307e24b0af45787ab6456535b8346e0239a commit: 0b1e3307e24b0af45787ab6456535b8346e0239a branch: master author: Victor Stinner committer: GitHub date: 2020-05-05T16:14:31+02:00 summary: bpo-40513: Per-interpreter gil_drop_request (GH-19927) Move gil_drop_request member from _PyRuntimeState.ceval to PyInterpreterState.ceval. files: M Include/internal/pycore_interp.h M Include/internal/pycore_runtime.h M Python/ceval.c M Python/ceval_gil.h diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index 251ee06ed4b00..fafc72eb97a00 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -42,6 +42,8 @@ struct _ceval_state { /* This single variable consolidates all requests to break out of the fast path in the eval loop. */ _Py_atomic_int eval_breaker; + /* Request for dropping the GIL */ + _Py_atomic_int gil_drop_request; struct _pending_calls pending; /* Request for checking signals. */ _Py_atomic_int signals_pending; diff --git a/Include/internal/pycore_runtime.h b/Include/internal/pycore_runtime.h index d432c6cc5112a..c59733559167a 100644 --- a/Include/internal/pycore_runtime.h +++ b/Include/internal/pycore_runtime.h @@ -15,8 +15,6 @@ extern "C" { struct _ceval_runtime_state { int recursion_limit; - /* Request for dropping the GIL */ - _Py_atomic_int gil_drop_request; struct _gil_runtime_state gil; }; diff --git a/Python/ceval.c b/Python/ceval.c index addc0264b171a..6b002730c8d78 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -143,77 +143,70 @@ is_tstate_valid(PyThreadState *tstate) the GIL eventually anyway. 
*/ static inline void COMPUTE_EVAL_BREAKER(PyInterpreterState *interp, - struct _ceval_runtime_state *ceval, - struct _ceval_state *ceval2) + struct _ceval_state *ceval) { - _Py_atomic_store_relaxed(&ceval2->eval_breaker, + _Py_atomic_store_relaxed(&ceval->eval_breaker, _Py_atomic_load_relaxed(&ceval->gil_drop_request) - | (_Py_atomic_load_relaxed(&ceval2->signals_pending) + | (_Py_atomic_load_relaxed(&ceval->signals_pending) && _Py_ThreadCanHandleSignals(interp)) - | (_Py_atomic_load_relaxed(&ceval2->pending.calls_to_do) + | (_Py_atomic_load_relaxed(&ceval->pending.calls_to_do) && _Py_ThreadCanHandlePendingCalls()) - | ceval2->pending.async_exc); + | ceval->pending.async_exc); } static inline void SET_GIL_DROP_REQUEST(PyInterpreterState *interp) { - struct _ceval_runtime_state *ceval = &interp->runtime->ceval; - struct _ceval_state *ceval2 = &interp->ceval; + struct _ceval_state *ceval = &interp->ceval; _Py_atomic_store_relaxed(&ceval->gil_drop_request, 1); - _Py_atomic_store_relaxed(&ceval2->eval_breaker, 1); + _Py_atomic_store_relaxed(&ceval->eval_breaker, 1); } static inline void RESET_GIL_DROP_REQUEST(PyInterpreterState *interp) { - struct _ceval_runtime_state *ceval = &interp->runtime->ceval; - struct _ceval_state *ceval2 = &interp->ceval; + struct _ceval_state *ceval = &interp->ceval; _Py_atomic_store_relaxed(&ceval->gil_drop_request, 0); - COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); + COMPUTE_EVAL_BREAKER(interp, ceval); } static inline void SIGNAL_PENDING_CALLS(PyInterpreterState *interp) { - struct _ceval_runtime_state *ceval = &interp->runtime->ceval; - struct _ceval_state *ceval2 = &interp->ceval; - _Py_atomic_store_relaxed(&ceval2->pending.calls_to_do, 1); - COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); + struct _ceval_state *ceval = &interp->ceval; + _Py_atomic_store_relaxed(&ceval->pending.calls_to_do, 1); + COMPUTE_EVAL_BREAKER(interp, ceval); } static inline void UNSIGNAL_PENDING_CALLS(PyInterpreterState *interp) { - struct _ceval_runtime_state 
*ceval = &interp->runtime->ceval; - struct _ceval_state *ceval2 = &interp->ceval; - _Py_atomic_store_relaxed(&ceval2->pending.calls_to_do, 0); - COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); + struct _ceval_state *ceval = &interp->ceval; + _Py_atomic_store_relaxed(&ceval->pending.calls_to_do, 0); + COMPUTE_EVAL_BREAKER(interp, ceval); } static inline void SIGNAL_PENDING_SIGNALS(PyInterpreterState *interp) { - struct _ceval_runtime_state *ceval = &interp->runtime->ceval; - struct _ceval_state *ceval2 = &interp->ceval; - _Py_atomic_store_relaxed(&ceval2->signals_pending, 1); + struct _ceval_state *ceval = &interp->ceval; + _Py_atomic_store_relaxed(&ceval->signals_pending, 1); /* eval_breaker is not set to 1 if thread_can_handle_signals() is false */ - COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); + COMPUTE_EVAL_BREAKER(interp, ceval); } static inline void UNSIGNAL_PENDING_SIGNALS(PyInterpreterState *interp) { - struct _ceval_runtime_state *ceval = &interp->runtime->ceval; - struct _ceval_state *ceval2 = &interp->ceval; - _Py_atomic_store_relaxed(&ceval2->signals_pending, 0); - COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); + struct _ceval_state *ceval = &interp->ceval; + _Py_atomic_store_relaxed(&ceval->signals_pending, 0); + COMPUTE_EVAL_BREAKER(interp, ceval); } @@ -229,10 +222,9 @@ SIGNAL_ASYNC_EXC(PyInterpreterState *interp) static inline void UNSIGNAL_ASYNC_EXC(PyInterpreterState *interp) { - struct _ceval_runtime_state *ceval = &interp->runtime->ceval; - struct _ceval_state *ceval2 = &interp->ceval; - ceval2->pending.async_exc = 0; - COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); + struct _ceval_state *ceval = &interp->ceval; + ceval->pending.async_exc = 0; + COMPUTE_EVAL_BREAKER(interp, ceval); } @@ -357,17 +349,19 @@ PyEval_ReleaseLock(void) { _PyRuntimeState *runtime = &_PyRuntime; PyThreadState *tstate = _PyRuntimeState_GetThreadState(runtime); + struct _ceval_state *ceval2 = &tstate->interp->ceval; /* This function must succeed when the current thread state is 
NULL. We therefore avoid PyThreadState_Get() which dumps a fatal error in debug mode. */ - drop_gil(&runtime->ceval, tstate); + drop_gil(&runtime->ceval, ceval2, tstate); } void _PyEval_ReleaseLock(PyThreadState *tstate) { struct _ceval_runtime_state *ceval = &tstate->interp->runtime->ceval; - drop_gil(ceval, tstate); + struct _ceval_state *ceval2 = &tstate->interp->ceval; + drop_gil(ceval, ceval2, tstate); } void @@ -393,7 +387,9 @@ PyEval_ReleaseThread(PyThreadState *tstate) if (new_tstate != tstate) { Py_FatalError("wrong thread state"); } - drop_gil(&runtime->ceval, tstate); + struct _ceval_runtime_state *ceval = &runtime->ceval; + struct _ceval_state *ceval2 = &tstate->interp->ceval; + drop_gil(ceval, ceval2, tstate); } #ifdef HAVE_FORK @@ -439,13 +435,14 @@ PyThreadState * PyEval_SaveThread(void) { _PyRuntimeState *runtime = &_PyRuntime; - struct _ceval_runtime_state *ceval = &runtime->ceval; PyThreadState *tstate = _PyThreadState_Swap(&runtime->gilstate, NULL); ensure_tstate_not_null(__func__, tstate); + struct _ceval_runtime_state *ceval = &runtime->ceval; + struct _ceval_state *ceval2 = &tstate->interp->ceval; assert(gil_created(&ceval->gil)); - drop_gil(ceval, tstate); + drop_gil(ceval, ceval2, tstate); return tstate; } @@ -847,12 +844,12 @@ eval_frame_handle_pending(PyThreadState *tstate) } /* GIL drop request */ - if (_Py_atomic_load_relaxed(&ceval->gil_drop_request)) { + if (_Py_atomic_load_relaxed(&ceval2->gil_drop_request)) { /* Give another thread a chance */ if (_PyThreadState_Swap(&runtime->gilstate, NULL) != tstate) { Py_FatalError("tstate mix-up"); } - drop_gil(ceval, tstate); + drop_gil(ceval, ceval2, tstate); /* Other threads may run now */ diff --git a/Python/ceval_gil.h b/Python/ceval_gil.h index a025a9fad1248..db47077d5c1ce 100644 --- a/Python/ceval_gil.h +++ b/Python/ceval_gil.h @@ -141,7 +141,8 @@ static void recreate_gil(struct _gil_runtime_state *gil) } static void -drop_gil(struct _ceval_runtime_state *ceval, PyThreadState *tstate) 
+drop_gil(struct _ceval_runtime_state *ceval, struct _ceval_state *ceval2, + PyThreadState *tstate) { struct _gil_runtime_state *gil = &ceval->gil; if (!_Py_atomic_load_relaxed(&gil->locked)) { @@ -163,7 +164,7 @@ drop_gil(struct _ceval_runtime_state *ceval, PyThreadState *tstate) MUTEX_UNLOCK(gil->mutex); #ifdef FORCE_SWITCHING - if (_Py_atomic_load_relaxed(&ceval->gil_drop_request) && tstate != NULL) { + if (_Py_atomic_load_relaxed(&ceval2->gil_drop_request) && tstate != NULL) { MUTEX_LOCK(gil->switch_mutex); /* Not switched yet => wait */ if (((PyThreadState*)_Py_atomic_load_relaxed(&gil->last_holder)) == tstate) @@ -226,6 +227,7 @@ take_gil(PyThreadState *tstate) assert(is_tstate_valid(tstate)); PyInterpreterState *interp = tstate->interp; struct _ceval_runtime_state *ceval = &interp->runtime->ceval; + struct _ceval_state *ceval2 = &interp->ceval; struct _gil_runtime_state *gil = &ceval->gil; /* Check that _PyEval_InitThreads() was called to create the lock */ @@ -289,12 +291,12 @@ take_gil(PyThreadState *tstate) in take_gil() while the main thread called wait_for_thread_shutdown() from Py_Finalize(). */ MUTEX_UNLOCK(gil->mutex); - drop_gil(ceval, tstate); + drop_gil(ceval, ceval2, tstate); PyThread_exit_thread(); } assert(is_tstate_valid(tstate)); - if (_Py_atomic_load_relaxed(&ceval->gil_drop_request)) { + if (_Py_atomic_load_relaxed(&ceval2->gil_drop_request)) { RESET_GIL_DROP_REQUEST(interp); } else { @@ -303,8 +305,7 @@ take_gil(PyThreadState *tstate) handle signals. Note: RESET_GIL_DROP_REQUEST() calls COMPUTE_EVAL_BREAKER(). 
*/ - struct _ceval_state *ceval2 = &interp->ceval; - COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); + COMPUTE_EVAL_BREAKER(interp, ceval2); } /* Don't access tstate if the thread must exit */ From webhook-mailer at python.org Tue May 5 10:41:19 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 05 May 2020 14:41:19 -0000 Subject: [Python-checkins] bpo-40514: Add --with-experimental-isolated-subinterpreters (GH-19926) Message-ID: https://github.com/python/cpython/commit/c5fa364f4ea836f25dd07cfb328152d40a568371 commit: c5fa364f4ea836f25dd07cfb328152d40a568371 branch: master author: Victor Stinner committer: GitHub date: 2020-05-05T16:41:11+02:00 summary: bpo-40514: Add --with-experimental-isolated-subinterpreters (GH-19926) Add --with-experimental-isolated-subinterpreters build option to configure: better isolate subinterpreters, experimental build mode. When used, force the usage of the libc malloc() memory allocator, since pymalloc relies on the unique global interpreter lock (GIL). files: A Misc/NEWS.d/next/Build/2020-05-05-15-39-11.bpo-40514.bZZmuS.rst M Python/preconfig.c M configure M configure.ac M pyconfig.h.in diff --git a/Misc/NEWS.d/next/Build/2020-05-05-15-39-11.bpo-40514.bZZmuS.rst b/Misc/NEWS.d/next/Build/2020-05-05-15-39-11.bpo-40514.bZZmuS.rst new file mode 100644 index 0000000000000..ab9062c28f4bb --- /dev/null +++ b/Misc/NEWS.d/next/Build/2020-05-05-15-39-11.bpo-40514.bZZmuS.rst @@ -0,0 +1,2 @@ +Add ``--with-experimental-isolated-subinterpreters`` build option to +``configure``: better isolate subinterpreters, experimental build mode. 
diff --git a/Python/preconfig.c b/Python/preconfig.c index 262738fa57da5..fd94d7dda1c29 100644 --- a/Python/preconfig.c +++ b/Python/preconfig.c @@ -291,7 +291,17 @@ _PyPreConfig_InitCompatConfig(PyPreConfig *config) config->coerce_c_locale_warn = 0; config->dev_mode = -1; +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + /* bpo-40512: pymalloc is not compatible with subinterpreters, + force usage of libc malloc() which is thread-safe. */ +#ifdef Py_DEBUG + config->allocator = PYMEM_ALLOCATOR_MALLOC_DEBUG; +#else + config->allocator = PYMEM_ALLOCATOR_MALLOC; +#endif +#else config->allocator = PYMEM_ALLOCATOR_NOT_SET; +#endif #ifdef MS_WINDOWS config->legacy_windows_fs_encoding = -1; #endif diff --git a/configure b/configure index a8a35d0defc6b..26e9aa9fe454e 100755 --- a/configure +++ b/configure @@ -845,6 +845,7 @@ with_computed_gotos with_ensurepip with_openssl with_ssl_default_suites +with_experimental_isolated_subinterpreters ' ac_precious_vars='build_alias host_alias @@ -1575,6 +1576,9 @@ Optional Packages: leave OpenSSL's defaults untouched, STRING: use a custom string, PROTOCOL_SSLv2 ignores the setting, see Doc/library/ssl.rst + --with-experimental-isolated-subinterpreters + better isolate subinterpreters, experimental build + mode (default is no) Some influential environment variables: MACHDEP name for machine-dependent library files @@ -17489,6 +17493,30 @@ $as_echo "#define PY_SSL_DEFAULT_CIPHERS 1" >>confdefs.h fi +# --with-experimental-isolated-subinterpreters + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-experimental-isolated-subinterpreters" >&5 +$as_echo_n "checking for --with-experimental-isolated-subinterpreters... " >&6; } + +# Check whether --with-experimental-isolated-subinterpreters was given. 
+if test "${with_experimental_isolated_subinterpreters+set}" = set; then : + withval=$with_experimental_isolated_subinterpreters; +if test "$withval" != no +then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; }; + $as_echo "#define EXPERIMENTAL_ISOLATED_SUBINTERPRETERS 1" >>confdefs.h + +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; }; +fi +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + # generate output files ac_config_files="$ac_config_files Makefile.pre Misc/python.pc Misc/python-embed.pc Misc/python-config.sh" diff --git a/configure.ac b/configure.ac index f996051efc719..acb6d4bfa8da1 100644 --- a/configure.ac +++ b/configure.ac @@ -5717,6 +5717,23 @@ AC_MSG_RESULT(python) AC_DEFINE(PY_SSL_DEFAULT_CIPHERS, 1) ]) +# --with-experimental-isolated-subinterpreters +AH_TEMPLATE(EXPERIMENTAL_ISOLATED_SUBINTERPRETERS, + [Better isolate subinterpreters, experimental build mode.]) +AC_MSG_CHECKING(for --with-experimental-isolated-subinterpreters) +AC_ARG_WITH(experimental-isolated-subinterpreters, + AS_HELP_STRING([--with-experimental-isolated-subinterpreters], + [better isolate subinterpreters, experimental build mode (default is no)]), +[ +if test "$withval" != no +then + AC_MSG_RESULT(yes); + AC_DEFINE(EXPERIMENTAL_ISOLATED_SUBINTERPRETERS) +else + AC_MSG_RESULT(no); +fi], +[AC_MSG_RESULT(no)]) + # generate output files AC_CONFIG_FILES(Makefile.pre Misc/python.pc Misc/python-embed.pc Misc/python-config.sh) diff --git a/pyconfig.h.in b/pyconfig.h.in index 75ac368aadafe..c06c4958726c0 100644 --- a/pyconfig.h.in +++ b/pyconfig.h.in @@ -38,6 +38,9 @@ /* Define if --enable-ipv6 is specified */ #undef ENABLE_IPV6 +/* Better isolate subinterpreters, experimental build mode. 
*/ +#undef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + /* Define to 1 if your system stores words within floats with the most significant word first */ #undef FLOAT_WORDS_BIGENDIAN From webhook-mailer at python.org Tue May 5 10:50:04 2020 From: webhook-mailer at python.org (Javier Buzzi) Date: Tue, 05 May 2020 14:50:04 -0000 Subject: [Python-checkins] bpo-32117: Updated Simpsons names in docs (GH-19737) Message-ID: https://github.com/python/cpython/commit/627f7012353411590434a7d5777ddcbcc8d97fcd commit: 627f7012353411590434a7d5777ddcbcc8d97fcd branch: master author: Javier Buzzi committer: GitHub date: 2020-05-05T07:49:57-07:00 summary: bpo-32117: Updated Simpsons names in docs (GH-19737) `sally` is not a Simpsons character Automerge-Triggered-By: @gvanrossum files: M Doc/whatsnew/3.8.rst diff --git a/Doc/whatsnew/3.8.rst b/Doc/whatsnew/3.8.rst index 6d2b0d905ff06..fdfc0a8f472cd 100644 --- a/Doc/whatsnew/3.8.rst +++ b/Doc/whatsnew/3.8.rst @@ -428,8 +428,8 @@ Other Language Changes lastname, *members = family.split() return lastname.upper(), *members - >>> parse('simpsons homer marge bart lisa sally') - ('SIMPSONS', 'homer', 'marge', 'bart', 'lisa', 'sally') + >>> parse('simpsons homer marge bart lisa maggie') + ('SIMPSONS', 'homer', 'marge', 'bart', 'lisa', 'maggie') (Contributed by David Cuthbert and Jordan Chapman in :issue:`32117`.) From webhook-mailer at python.org Tue May 5 10:52:59 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 05 May 2020 14:52:59 -0000 Subject: [Python-checkins] bpo-40513: Per-interpreter recursion_limit (GH-19929) Message-ID: https://github.com/python/cpython/commit/4e30ed3af06ae655f4cb8aad8cba21f341384250 commit: 4e30ed3af06ae655f4cb8aad8cba21f341384250 branch: master author: Victor Stinner committer: GitHub date: 2020-05-05T16:52:52+02:00 summary: bpo-40513: Per-interpreter recursion_limit (GH-19929) Move recursion_limit member from _PyRuntimeState.ceval to PyInterpreterState.ceval. 
* Py_SetRecursionLimit() now only sets _Py_CheckRecursionLimit of ceval.c if the current Python thread is part of the main interpreter. * Inline _Py_MakeEndRecCheck() into _Py_LeaveRecursiveCall(). * Convert _Py_RecursionLimitLowerWaterMark() macro into a static inline function. files: M Include/internal/pycore_ceval.h M Include/internal/pycore_interp.h M Include/internal/pycore_runtime.h M Python/ceval.c diff --git a/Include/internal/pycore_ceval.h b/Include/internal/pycore_ceval.h index 2df796deade3a..18c8f027af16e 100644 --- a/Include/internal/pycore_ceval.h +++ b/Include/internal/pycore_ceval.h @@ -65,12 +65,12 @@ PyAPI_DATA(int) _Py_CheckRecursionLimit; /* With USE_STACKCHECK macro defined, trigger stack checks in _Py_CheckRecursiveCall() on every 64th call to Py_EnterRecursiveCall. */ static inline int _Py_MakeRecCheck(PyThreadState *tstate) { - return (++tstate->recursion_depth > _Py_CheckRecursionLimit + return (++tstate->recursion_depth > tstate->interp->ceval.recursion_limit || ++tstate->stackcheck_counter > 64); } #else static inline int _Py_MakeRecCheck(PyThreadState *tstate) { - return (++tstate->recursion_depth > _Py_CheckRecursionLimit); + return (++tstate->recursion_depth > tstate->interp->ceval.recursion_limit); } #endif @@ -90,20 +90,22 @@ static inline int _Py_EnterRecursiveCall_inline(const char *where) { #define Py_EnterRecursiveCall(where) _Py_EnterRecursiveCall_inline(where) - /* Compute the "lower-water mark" for a recursion limit. When * Py_LeaveRecursiveCall() is called with a recursion depth below this mark, * the overflowed flag is reset to 0. */ -#define _Py_RecursionLimitLowerWaterMark(limit) \ - (((limit) > 200) \ - ? 
((limit) - 50) \ - : (3 * ((limit) >> 2))) - -#define _Py_MakeEndRecCheck(x) \ - (--(x) < _Py_RecursionLimitLowerWaterMark(_Py_CheckRecursionLimit)) +static inline int _Py_RecursionLimitLowerWaterMark(int limit) { + if (limit > 200) { + return (limit - 50); + } + else { + return (3 * (limit >> 2)); + } +} static inline void _Py_LeaveRecursiveCall(PyThreadState *tstate) { - if (_Py_MakeEndRecCheck(tstate->recursion_depth)) { + tstate->recursion_depth--; + int limit = tstate->interp->ceval.recursion_limit; + if (tstate->recursion_depth < _Py_RecursionLimitLowerWaterMark(limit)) { tstate->overflowed = 0; } } diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index fafc72eb97a00..08291012365ed 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -33,6 +33,7 @@ struct _pending_calls { }; struct _ceval_state { + int recursion_limit; /* Records whether tracing is on for any thread. Counts the number of threads for which tstate->c_tracefunc is non-NULL, so if the value is 0, we know we don't have to check this thread's diff --git a/Include/internal/pycore_runtime.h b/Include/internal/pycore_runtime.h index c59733559167a..8ca1dfbb3f0a6 100644 --- a/Include/internal/pycore_runtime.h +++ b/Include/internal/pycore_runtime.h @@ -14,7 +14,6 @@ extern "C" { /* ceval state */ struct _ceval_runtime_state { - int recursion_limit; struct _gil_runtime_state gil; }; diff --git a/Python/ceval.c b/Python/ceval.c index 6b002730c8d78..601e21a2fccd2 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -699,7 +699,6 @@ int _Py_CheckRecursionLimit = Py_DEFAULT_RECURSION_LIMIT; void _PyEval_InitRuntimeState(struct _ceval_runtime_state *ceval) { - ceval->recursion_limit = Py_DEFAULT_RECURSION_LIMIT; _Py_CheckRecursionLimit = Py_DEFAULT_RECURSION_LIMIT; _gil_initialize(&ceval->gil); } @@ -707,6 +706,8 @@ _PyEval_InitRuntimeState(struct _ceval_runtime_state *ceval) int _PyEval_InitState(struct _ceval_state *ceval) { + 
ceval->recursion_limit = Py_DEFAULT_RECURSION_LIMIT; + struct _pending_calls *pending = &ceval->pending; assert(pending->lock == NULL); @@ -730,16 +731,18 @@ _PyEval_FiniState(struct _ceval_state *ceval) int Py_GetRecursionLimit(void) { - struct _ceval_runtime_state *ceval = &_PyRuntime.ceval; - return ceval->recursion_limit; + PyThreadState *tstate = _PyThreadState_GET(); + return tstate->interp->ceval.recursion_limit; } void Py_SetRecursionLimit(int new_limit) { - struct _ceval_runtime_state *ceval = &_PyRuntime.ceval; - ceval->recursion_limit = new_limit; - _Py_CheckRecursionLimit = new_limit; + PyThreadState *tstate = _PyThreadState_GET(); + tstate->interp->ceval.recursion_limit = new_limit; + if (_Py_IsMainInterpreter(tstate)) { + _Py_CheckRecursionLimit = new_limit; + } } /* The function _Py_EnterRecursiveCall() only calls _Py_CheckRecursiveCall() @@ -750,8 +753,7 @@ Py_SetRecursionLimit(int new_limit) int _Py_CheckRecursiveCall(PyThreadState *tstate, const char *where) { - _PyRuntimeState *runtime = tstate->interp->runtime; - int recursion_limit = runtime->ceval.recursion_limit; + int recursion_limit = tstate->interp->ceval.recursion_limit; #ifdef USE_STACKCHECK tstate->stackcheck_counter = 0; @@ -760,8 +762,10 @@ _Py_CheckRecursiveCall(PyThreadState *tstate, const char *where) _PyErr_SetString(tstate, PyExc_MemoryError, "Stack overflow"); return -1; } - /* Needed for ABI backwards-compatibility (see bpo-31857) */ - _Py_CheckRecursionLimit = recursion_limit; + if (_Py_IsMainInterpreter(tstate)) { + /* Needed for ABI backwards-compatibility (see bpo-31857) */ + _Py_CheckRecursionLimit = recursion_limit; + } #endif if (tstate->recursion_critical) /* Somebody asked that we don't check for recursion. 
*/ From webhook-mailer at python.org Tue May 5 11:07:50 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 05 May 2020 15:07:50 -0000 Subject: [Python-checkins] bpo-29587: _PyErr_ChainExceptions() checks exception (GH-19902) Message-ID: https://github.com/python/cpython/commit/b0be6b3b94fbdf31b796adc19dc86a04a52b03e1 commit: b0be6b3b94fbdf31b796adc19dc86a04a52b03e1 branch: master author: Victor Stinner committer: GitHub date: 2020-05-05T17:07:41+02:00 summary: bpo-29587: _PyErr_ChainExceptions() checks exception (GH-19902) _PyErr_ChainExceptions() now ensures that the first parameter is an exception type, as done by _PyErr_SetObject(). * The following function now check PyExceptionInstance_Check() in an assertion using a new _PyBaseExceptionObject_cast() helper function: * PyException_GetTraceback(), PyException_SetTraceback() * PyException_GetCause(), PyException_SetCause() * PyException_GetContext(), PyException_SetContext() * PyExceptionClass_Name() now checks PyExceptionClass_Check() with an assertion. * Remove XXX comment and add gi_exc_state variable to _gen_throw(). * Remove comment from test_generators files: M Lib/test/test_generators.py M Objects/exceptions.c M Objects/genobject.c M Python/errors.c diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py index 5824ecd7c37e8..e047801199680 100644 --- a/Lib/test/test_generators.py +++ b/Lib/test/test_generators.py @@ -342,9 +342,6 @@ def g(): try: yield except Exception: - # Without the `gi_exc_state.exc_type != Py_None` in - # _gen_throw(), this line was causing a crash ("Segmentation - # fault (core dumped)") on e.g. Fedora 32. 
raise RuntimeError gen = g() diff --git a/Objects/exceptions.c b/Objects/exceptions.c index ca917b436c4bb..db5e3da12b00f 100644 --- a/Objects/exceptions.c +++ b/Objects/exceptions.c @@ -304,22 +304,33 @@ static PyGetSetDef BaseException_getset[] = { }; +static inline PyBaseExceptionObject* +_PyBaseExceptionObject_cast(PyObject *exc) +{ + assert(PyExceptionInstance_Check(exc)); + return (PyBaseExceptionObject *)exc; +} + + PyObject * -PyException_GetTraceback(PyObject *self) { - PyBaseExceptionObject *base_self = (PyBaseExceptionObject *)self; +PyException_GetTraceback(PyObject *self) +{ + PyBaseExceptionObject *base_self = _PyBaseExceptionObject_cast(self); Py_XINCREF(base_self->traceback); return base_self->traceback; } int -PyException_SetTraceback(PyObject *self, PyObject *tb) { - return BaseException_set_tb((PyBaseExceptionObject *)self, tb, NULL); +PyException_SetTraceback(PyObject *self, PyObject *tb) +{ + return BaseException_set_tb(_PyBaseExceptionObject_cast(self), tb, NULL); } PyObject * -PyException_GetCause(PyObject *self) { - PyObject *cause = ((PyBaseExceptionObject *)self)->cause; +PyException_GetCause(PyObject *self) +{ + PyObject *cause = _PyBaseExceptionObject_cast(self)->cause; Py_XINCREF(cause); return cause; } @@ -328,13 +339,15 @@ PyException_GetCause(PyObject *self) { void PyException_SetCause(PyObject *self, PyObject *cause) { - ((PyBaseExceptionObject *)self)->suppress_context = 1; - Py_XSETREF(((PyBaseExceptionObject *)self)->cause, cause); + PyBaseExceptionObject *base_self = _PyBaseExceptionObject_cast(self); + base_self->suppress_context = 1; + Py_XSETREF(base_self->cause, cause); } PyObject * -PyException_GetContext(PyObject *self) { - PyObject *context = ((PyBaseExceptionObject *)self)->context; +PyException_GetContext(PyObject *self) +{ + PyObject *context = _PyBaseExceptionObject_cast(self)->context; Py_XINCREF(context); return context; } @@ -343,7 +356,7 @@ PyException_GetContext(PyObject *self) { void 
PyException_SetContext(PyObject *self, PyObject *context) { - Py_XSETREF(((PyBaseExceptionObject *)self)->context, context); + Py_XSETREF(_PyBaseExceptionObject_cast(self)->context, context); } #undef PyExceptionClass_Name @@ -351,6 +364,7 @@ PyException_SetContext(PyObject *self, PyObject *context) const char * PyExceptionClass_Name(PyObject *ob) { + assert(PyExceptionClass_Check(ob)); return ((PyTypeObject*)ob)->tp_name; } diff --git a/Objects/genobject.c b/Objects/genobject.c index b27fa929a2625..5b253edfdcd0f 100644 --- a/Objects/genobject.c +++ b/Objects/genobject.c @@ -512,15 +512,15 @@ _gen_throw(PyGenObject *gen, int close_on_genexit, } PyErr_Restore(typ, val, tb); - /* XXX It seems like we shouldn't have to check not equal to Py_None - here because exc_type should only ever be a class. But not including - this check was causing crashes on certain tests e.g. on Fedora. */ - if (gen->gi_exc_state.exc_type && gen->gi_exc_state.exc_type != Py_None) { - Py_INCREF(gen->gi_exc_state.exc_type); - Py_XINCREF(gen->gi_exc_state.exc_value); - Py_XINCREF(gen->gi_exc_state.exc_traceback); - _PyErr_ChainExceptions(gen->gi_exc_state.exc_type, - gen->gi_exc_state.exc_value, gen->gi_exc_state.exc_traceback); + + _PyErr_StackItem *gi_exc_state = &gen->gi_exc_state; + if (gi_exc_state->exc_type != NULL && gi_exc_state->exc_type != Py_None) { + Py_INCREF(gi_exc_state->exc_type); + Py_XINCREF(gi_exc_state->exc_value); + Py_XINCREF(gi_exc_state->exc_traceback); + _PyErr_ChainExceptions(gi_exc_state->exc_type, + gi_exc_state->exc_value, + gi_exc_state->exc_traceback); } return gen_send_ex(gen, Py_None, 1, 0); diff --git a/Python/errors.c b/Python/errors.c index 9e53d050416ff..f856a798eed1e 100644 --- a/Python/errors.c +++ b/Python/errors.c @@ -107,7 +107,8 @@ _PyErr_SetObject(PyThreadState *tstate, PyObject *exception, PyObject *value) if (exception != NULL && !PyExceptionClass_Check(exception)) { _PyErr_Format(tstate, PyExc_SystemError, - "exception %R not a BaseException 
subclass", + "_PyErr_SetObject: " + "exception %R is not a BaseException subclass", exception); return; } @@ -484,6 +485,15 @@ _PyErr_ChainExceptions(PyObject *exc, PyObject *val, PyObject *tb) return; PyThreadState *tstate = _PyThreadState_GET(); + + if (!PyExceptionClass_Check(exc)) { + _PyErr_Format(tstate, PyExc_SystemError, + "_PyErr_ChainExceptions: " + "exception %R is not a BaseException subclass", + exc); + return; + } + if (_PyErr_Occurred(tstate)) { PyObject *exc2, *val2, *tb2; _PyErr_Fetch(tstate, &exc2, &val2, &tb2); From webhook-mailer at python.org Tue May 5 11:20:46 2020 From: webhook-mailer at python.org (Hai Shi) Date: Tue, 05 May 2020 15:20:46 -0000 Subject: [Python-checkins] bpo-40520: Remove redundant comment in pydebug.h (GH-19931) Message-ID: https://github.com/python/cpython/commit/6351d9e4400a77fe1fcbe4f03e5fb6620cca236d commit: 6351d9e4400a77fe1fcbe4f03e5fb6620cca236d branch: master author: Hai Shi committer: GitHub date: 2020-05-05T08:20:38-07:00 summary: bpo-40520: Remove redundant comment in pydebug.h (GH-19931) Automerge-Triggered-By: @corona10 files: M Include/pydebug.h diff --git a/Include/pydebug.h b/Include/pydebug.h index bd4aafe3b49f8..78bcb118be465 100644 --- a/Include/pydebug.h +++ b/Include/pydebug.h @@ -5,8 +5,6 @@ extern "C" { #endif -/* These global variable are defined in pylifecycle.c */ -/* XXX (ncoghlan): move these declarations to pylifecycle.h? 
*/ PyAPI_DATA(int) Py_DebugFlag; PyAPI_DATA(int) Py_VerboseFlag; PyAPI_DATA(int) Py_QuietFlag; From webhook-mailer at python.org Tue May 5 11:40:26 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 05 May 2020 15:40:26 -0000 Subject: [Python-checkins] Revert "bpo-40513: Per-interpreter signals pending (GH-19924)" (GH-19932) Message-ID: https://github.com/python/cpython/commit/299b8c61e9d1a42b929b8deb1b05067876e191e6 commit: 299b8c61e9d1a42b929b8deb1b05067876e191e6 branch: master author: Victor Stinner committer: GitHub date: 2020-05-05T17:40:18+02:00 summary: Revert "bpo-40513: Per-interpreter signals pending (GH-19924)" (GH-19932) This reverts commit 4e01946cafca0cf49f796c3118e0d65237bcad69. files: M Include/internal/pycore_interp.h M Include/internal/pycore_runtime.h M Python/ceval.c M Python/ceval_gil.h diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index 08291012365ed..5bf8998e67320 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -46,8 +46,6 @@ struct _ceval_state { /* Request for dropping the GIL */ _Py_atomic_int gil_drop_request; struct _pending_calls pending; - /* Request for checking signals. */ - _Py_atomic_int signals_pending; }; diff --git a/Include/internal/pycore_runtime.h b/Include/internal/pycore_runtime.h index 8ca1dfbb3f0a6..34eb492b9f254 100644 --- a/Include/internal/pycore_runtime.h +++ b/Include/internal/pycore_runtime.h @@ -14,6 +14,11 @@ extern "C" { /* ceval state */ struct _ceval_runtime_state { + /* Request for checking signals. It is shared by all interpreters (see + bpo-40513). Any thread of any interpreter can receive a signal, but only + the main thread of the main interpreter can handle signals: see + _Py_ThreadCanHandleSignals(). 
*/ + _Py_atomic_int signals_pending; struct _gil_runtime_state gil; }; diff --git a/Python/ceval.c b/Python/ceval.c index 601e21a2fccd2..0c08a76f7d113 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -143,70 +143,76 @@ is_tstate_valid(PyThreadState *tstate) the GIL eventually anyway. */ static inline void COMPUTE_EVAL_BREAKER(PyInterpreterState *interp, - struct _ceval_state *ceval) + struct _ceval_runtime_state *ceval, + struct _ceval_state *ceval2) { - _Py_atomic_store_relaxed(&ceval->eval_breaker, - _Py_atomic_load_relaxed(&ceval->gil_drop_request) + _Py_atomic_store_relaxed(&ceval2->eval_breaker, + _Py_atomic_load_relaxed(&ceval2->gil_drop_request) | (_Py_atomic_load_relaxed(&ceval->signals_pending) && _Py_ThreadCanHandleSignals(interp)) - | (_Py_atomic_load_relaxed(&ceval->pending.calls_to_do) + | (_Py_atomic_load_relaxed(&ceval2->pending.calls_to_do) && _Py_ThreadCanHandlePendingCalls()) - | ceval->pending.async_exc); + | ceval2->pending.async_exc); } static inline void SET_GIL_DROP_REQUEST(PyInterpreterState *interp) { - struct _ceval_state *ceval = &interp->ceval; - _Py_atomic_store_relaxed(&ceval->gil_drop_request, 1); - _Py_atomic_store_relaxed(&ceval->eval_breaker, 1); + struct _ceval_state *ceval2 = &interp->ceval; + _Py_atomic_store_relaxed(&ceval2->gil_drop_request, 1); + _Py_atomic_store_relaxed(&ceval2->eval_breaker, 1); } static inline void RESET_GIL_DROP_REQUEST(PyInterpreterState *interp) { - struct _ceval_state *ceval = &interp->ceval; - _Py_atomic_store_relaxed(&ceval->gil_drop_request, 0); - COMPUTE_EVAL_BREAKER(interp, ceval); + struct _ceval_runtime_state *ceval = &interp->runtime->ceval; + struct _ceval_state *ceval2 = &interp->ceval; + _Py_atomic_store_relaxed(&ceval2->gil_drop_request, 0); + COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); } static inline void SIGNAL_PENDING_CALLS(PyInterpreterState *interp) { - struct _ceval_state *ceval = &interp->ceval; - _Py_atomic_store_relaxed(&ceval->pending.calls_to_do, 1); - 
COMPUTE_EVAL_BREAKER(interp, ceval); + struct _ceval_runtime_state *ceval = &interp->runtime->ceval; + struct _ceval_state *ceval2 = &interp->ceval; + _Py_atomic_store_relaxed(&ceval2->pending.calls_to_do, 1); + COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); } static inline void UNSIGNAL_PENDING_CALLS(PyInterpreterState *interp) { - struct _ceval_state *ceval = &interp->ceval; - _Py_atomic_store_relaxed(&ceval->pending.calls_to_do, 0); - COMPUTE_EVAL_BREAKER(interp, ceval); + struct _ceval_runtime_state *ceval = &interp->runtime->ceval; + struct _ceval_state *ceval2 = &interp->ceval; + _Py_atomic_store_relaxed(&ceval2->pending.calls_to_do, 0); + COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); } static inline void SIGNAL_PENDING_SIGNALS(PyInterpreterState *interp) { - struct _ceval_state *ceval = &interp->ceval; + struct _ceval_runtime_state *ceval = &interp->runtime->ceval; + struct _ceval_state *ceval2 = &interp->ceval; _Py_atomic_store_relaxed(&ceval->signals_pending, 1); /* eval_breaker is not set to 1 if thread_can_handle_signals() is false */ - COMPUTE_EVAL_BREAKER(interp, ceval); + COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); } static inline void UNSIGNAL_PENDING_SIGNALS(PyInterpreterState *interp) { - struct _ceval_state *ceval = &interp->ceval; + struct _ceval_runtime_state *ceval = &interp->runtime->ceval; + struct _ceval_state *ceval2 = &interp->ceval; _Py_atomic_store_relaxed(&ceval->signals_pending, 0); - COMPUTE_EVAL_BREAKER(interp, ceval); + COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); } @@ -222,9 +228,10 @@ SIGNAL_ASYNC_EXC(PyInterpreterState *interp) static inline void UNSIGNAL_ASYNC_EXC(PyInterpreterState *interp) { - struct _ceval_state *ceval = &interp->ceval; - ceval->pending.async_exc = 0; - COMPUTE_EVAL_BREAKER(interp, ceval); + struct _ceval_runtime_state *ceval = &interp->runtime->ceval; + struct _ceval_state *ceval2 = &interp->ceval; + ceval2->pending.async_exc = 0; + COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); } @@ -349,11 +356,12 @@ 
PyEval_ReleaseLock(void) { _PyRuntimeState *runtime = &_PyRuntime; PyThreadState *tstate = _PyRuntimeState_GetThreadState(runtime); - struct _ceval_state *ceval2 = &tstate->interp->ceval; /* This function must succeed when the current thread state is NULL. We therefore avoid PyThreadState_Get() which dumps a fatal error in debug mode. */ - drop_gil(&runtime->ceval, ceval2, tstate); + struct _ceval_runtime_state *ceval = &runtime->ceval; + struct _ceval_state *ceval2 = &tstate->interp->ceval; + drop_gil(ceval, ceval2, tstate); } void @@ -435,7 +443,6 @@ PyThreadState * PyEval_SaveThread(void) { _PyRuntimeState *runtime = &_PyRuntime; - PyThreadState *tstate = _PyThreadState_Swap(&runtime->gilstate, NULL); ensure_tstate_not_null(__func__, tstate); @@ -831,16 +838,16 @@ eval_frame_handle_pending(PyThreadState *tstate) { _PyRuntimeState * const runtime = &_PyRuntime; struct _ceval_runtime_state *ceval = &runtime->ceval; - struct _ceval_state *ceval2 = &tstate->interp->ceval; /* Pending signals */ - if (_Py_atomic_load_relaxed(&ceval2->signals_pending)) { + if (_Py_atomic_load_relaxed(&ceval->signals_pending)) { if (handle_signals(tstate) != 0) { return -1; } } /* Pending calls */ + struct _ceval_state *ceval2 = &tstate->interp->ceval; if (_Py_atomic_load_relaxed(&ceval2->pending.calls_to_do)) { if (make_pending_calls(tstate) != 0) { return -1; diff --git a/Python/ceval_gil.h b/Python/ceval_gil.h index db47077d5c1ce..f25f810073294 100644 --- a/Python/ceval_gil.h +++ b/Python/ceval_gil.h @@ -305,7 +305,7 @@ take_gil(PyThreadState *tstate) handle signals. Note: RESET_GIL_DROP_REQUEST() calls COMPUTE_EVAL_BREAKER(). 
*/ - COMPUTE_EVAL_BREAKER(interp, ceval2); + COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); } /* Don't access tstate if the thread must exit */ From webhook-mailer at python.org Tue May 5 12:50:38 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 05 May 2020 16:50:38 -0000 Subject: [Python-checkins] bpo-40521: Disable Unicode caches in isolated subinterpreters (GH-19933) Message-ID: https://github.com/python/cpython/commit/607b1027fec7b4a1602aab7df57795fbcec1c51b commit: 607b1027fec7b4a1602aab7df57795fbcec1c51b branch: master author: Victor Stinner committer: GitHub date: 2020-05-05T18:50:30+02:00 summary: bpo-40521: Disable Unicode caches in isolated subinterpreters (GH-19933) When Python is built in the experimental isolated subinterpreters mode, disable Unicode singletons and Unicode interned strings since they are shared by all interpreters. Temporary workaround until these caches are made per-interpreter. files: M Objects/typeobject.c M Objects/unicodeobject.c diff --git a/Objects/typeobject.c b/Objects/typeobject.c index db0ae970090ba..1565b90898605 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -56,6 +56,11 @@ static size_t method_cache_misses = 0; static size_t method_cache_collisions = 0; #endif +/* bpo-40521: Interned strings are shared by all subinterpreters */ +#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +# define INTERN_NAME_STRINGS +#endif + /* alphabetical order */ _Py_IDENTIFIER(__abstractmethods__); _Py_IDENTIFIER(__class__); @@ -3418,6 +3423,7 @@ type_setattro(PyTypeObject *type, PyObject *name, PyObject *value) if (name == NULL) return -1; } +#ifdef INTERN_NAME_STRINGS if (!PyUnicode_CHECK_INTERNED(name)) { PyUnicode_InternInPlace(&name); if (!PyUnicode_CHECK_INTERNED(name)) { @@ -3427,6 +3433,7 @@ type_setattro(PyTypeObject *type, PyObject *name, PyObject *value) return -1; } } +#endif } else { /* Will fail in _PyObject_GenericSetAttrWithDict. 
*/ @@ -7531,10 +7538,17 @@ _PyTypes_InitSlotDefs(void) for (slotdef *p = slotdefs; p->name; p++) { /* Slots must be ordered by their offset in the PyHeapTypeObject. */ assert(!p[1].name || p->offset <= p[1].offset); +#ifdef INTERN_NAME_STRINGS p->name_strobj = PyUnicode_InternFromString(p->name); if (!p->name_strobj || !PyUnicode_CHECK_INTERNED(p->name_strobj)) { return _PyStatus_NO_MEMORY(); } +#else + p->name_strobj = PyUnicode_FromString(p->name); + if (!p->name_strobj) { + return _PyStatus_NO_MEMORY(); + } +#endif } slotdefs_initialized = 1; return _PyStatus_OK(); @@ -7559,7 +7573,9 @@ update_slot(PyTypeObject *type, PyObject *name) int offset; assert(PyUnicode_CheckExact(name)); +#ifdef INTERN_NAME_STRINGS assert(PyUnicode_CHECK_INTERNED(name)); +#endif assert(slotdefs_initialized); pp = ptrs; diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index aba7407533c4e..18b9458721de1 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -198,6 +198,11 @@ extern "C" { # define OVERALLOCATE_FACTOR 4 #endif +/* bpo-40521: Interned strings are shared by all interpreters. */ +#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +# define INTERNED_STRINGS +#endif + /* This dictionary holds all interned unicode strings. Note that references to strings in this dictionary are *not* counted in the string's ob_refcnt. When the interned string reaches a refcnt of 0 the string deallocation @@ -206,7 +211,9 @@ extern "C" { Another way to look at this is that to say that the actual reference count of a string is: s->ob_refcnt + (s->state ? 2 : 0) */ +#ifdef INTERNED_STRINGS static PyObject *interned = NULL; +#endif /* The empty Unicode object is shared to improve performance. */ static PyObject *unicode_empty = NULL; @@ -281,9 +288,16 @@ unicode_decode_utf8(const char *s, Py_ssize_t size, /* List of static strings. */ static _Py_Identifier *static_strings = NULL; +/* bpo-40521: Latin1 singletons are shared by all interpreters. 
*/ +#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +# define LATIN1_SINGLETONS +#endif + +#ifdef LATIN1_SINGLETONS /* Single character Unicode strings in the Latin-1 range are being shared as well. */ static PyObject *unicode_latin1[256] = {NULL}; +#endif /* Fast detection of the most frequent whitespace characters */ const unsigned char _Py_ascii_whitespace[] = { @@ -662,6 +676,7 @@ unicode_result_ready(PyObject *unicode) return unicode_empty; } +#ifdef LATIN1_SINGLETONS if (length == 1) { const void *data = PyUnicode_DATA(unicode); int kind = PyUnicode_KIND(unicode); @@ -683,6 +698,7 @@ unicode_result_ready(PyObject *unicode) } } } +#endif assert(_PyUnicode_CheckConsistency(unicode, 1)); return unicode; @@ -1913,10 +1929,12 @@ unicode_dealloc(PyObject *unicode) case SSTATE_INTERNED_MORTAL: /* revive dead object temporarily for DelItem */ Py_SET_REFCNT(unicode, 3); +#ifdef INTERNED_STRINGS if (PyDict_DelItem(interned, unicode) != 0) { _PyErr_WriteUnraisableMsg("deletion of interned string failed", NULL); } +#endif break; case SSTATE_INTERNED_IMMORTAL: @@ -1944,15 +1962,18 @@ unicode_dealloc(PyObject *unicode) static int unicode_is_singleton(PyObject *unicode) { - PyASCIIObject *ascii = (PyASCIIObject *)unicode; - if (unicode == unicode_empty) + if (unicode == unicode_empty) { return 1; + } +#ifdef LATIN1_SINGLETONS + PyASCIIObject *ascii = (PyASCIIObject *)unicode; if (ascii->state.kind != PyUnicode_WCHAR_KIND && ascii->length == 1) { Py_UCS4 ch = PyUnicode_READ_CHAR(unicode, 0); if (ch < 256 && unicode_latin1[ch] == unicode) return 1; } +#endif return 0; } #endif @@ -2094,16 +2115,28 @@ unicode_write_cstr(PyObject *unicode, Py_ssize_t index, static PyObject* get_latin1_char(unsigned char ch) { - PyObject *unicode = unicode_latin1[ch]; + PyObject *unicode; + +#ifdef LATIN1_SINGLETONS + unicode = unicode_latin1[ch]; + if (unicode) { + Py_INCREF(unicode); + return unicode; + } +#endif + + unicode = PyUnicode_New(1, ch); if (!unicode) { - unicode = PyUnicode_New(1, 
ch); - if (!unicode) - return NULL; - PyUnicode_1BYTE_DATA(unicode)[0] = ch; - assert(_PyUnicode_CheckConsistency(unicode, 1)); - unicode_latin1[ch] = unicode; + return NULL; } + + PyUnicode_1BYTE_DATA(unicode)[0] = ch; + assert(_PyUnicode_CheckConsistency(unicode, 1)); + +#ifdef LATIN1_SINGLETONS Py_INCREF(unicode); + unicode_latin1[ch] = unicode; +#endif return unicode; } @@ -11270,7 +11303,6 @@ int _PyUnicode_EqualToASCIIId(PyObject *left, _Py_Identifier *right) { PyObject *right_uni; - Py_hash_t hash; assert(_PyUnicode_CHECK(left)); assert(right->string); @@ -11302,10 +11334,12 @@ _PyUnicode_EqualToASCIIId(PyObject *left, _Py_Identifier *right) if (PyUnicode_CHECK_INTERNED(left)) return 0; +#ifdef INTERNED_STRINGS assert(_PyUnicode_HASH(right_uni) != -1); - hash = _PyUnicode_HASH(left); + Py_hash_t hash = _PyUnicode_HASH(left); if (hash != -1 && hash != _PyUnicode_HASH(right_uni)) return 0; +#endif return unicode_compare_eq(left, right_uni); } @@ -15487,20 +15521,26 @@ void PyUnicode_InternInPlace(PyObject **p) { PyObject *s = *p; - PyObject *t; #ifdef Py_DEBUG assert(s != NULL); assert(_PyUnicode_CHECK(s)); #else - if (s == NULL || !PyUnicode_Check(s)) + if (s == NULL || !PyUnicode_Check(s)) { return; + } #endif + /* If it's a subclass, we don't really know what putting it in the interned dict might do. */ - if (!PyUnicode_CheckExact(s)) + if (!PyUnicode_CheckExact(s)) { return; - if (PyUnicode_CHECK_INTERNED(s)) + } + + if (PyUnicode_CHECK_INTERNED(s)) { return; + } + +#ifdef INTERNED_STRINGS if (interned == NULL) { interned = PyDict_New(); if (interned == NULL) { @@ -15508,22 +15548,28 @@ PyUnicode_InternInPlace(PyObject **p) return; } } + + PyObject *t; Py_ALLOW_RECURSION t = PyDict_SetDefault(interned, s, s); Py_END_ALLOW_RECURSION + if (t == NULL) { PyErr_Clear(); return; } + if (t != s) { Py_INCREF(t); Py_SETREF(*p, t); return; } + /* The two references in interned are not counted by refcnt. 
The deallocator will take care of this */ Py_SET_REFCNT(s, Py_REFCNT(s) - 2); _PyUnicode_STATE(s).interned = SSTATE_INTERNED_MORTAL; +#endif } void @@ -16109,9 +16155,11 @@ _PyUnicode_Fini(PyThreadState *tstate) Py_CLEAR(unicode_empty); +#ifdef LATIN1_SINGLETONS for (Py_ssize_t i = 0; i < 256; i++) { Py_CLEAR(unicode_latin1[i]); } +#endif _PyUnicode_ClearStaticStrings(); } From webhook-mailer at python.org Tue May 5 13:45:43 2020 From: webhook-mailer at python.org (Steve Dower) Date: Tue, 05 May 2020 17:45:43 -0000 Subject: [Python-checkins] bpo-40458: Increase reserved stack space to prevent overflow crash on Windows (GH-19845) Message-ID: https://github.com/python/cpython/commit/ac4bf424119d1300f57929120968e216a85d3a25 commit: ac4bf424119d1300f57929120968e216a85d3a25 branch: master author: Steve Dower committer: GitHub date: 2020-05-05T18:45:35+01:00 summary: bpo-40458: Increase reserved stack space to prevent overflow crash on Windows (GH-19845) files: A Misc/NEWS.d/next/Windows/2020-05-01-20-57-57.bpo-40458.Eb0ueI.rst M PCbuild/python_uwp.vcxproj M PCbuild/pythonw_uwp.vcxproj diff --git a/Misc/NEWS.d/next/Windows/2020-05-01-20-57-57.bpo-40458.Eb0ueI.rst b/Misc/NEWS.d/next/Windows/2020-05-01-20-57-57.bpo-40458.Eb0ueI.rst new file mode 100644 index 0000000000000..4dc1ff480df87 --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2020-05-01-20-57-57.bpo-40458.Eb0ueI.rst @@ -0,0 +1 @@ +Increase reserved stack space to prevent overflow crash on Windows. 
diff --git a/PCbuild/python_uwp.vcxproj b/PCbuild/python_uwp.vcxproj index 5ff120a0da331..fb27e9e71222e 100644 --- a/PCbuild/python_uwp.vcxproj +++ b/PCbuild/python_uwp.vcxproj @@ -95,6 +95,7 @@ windowsapp.lib;%(AdditionalDependencies) Console + 2000000 diff --git a/PCbuild/pythonw_uwp.vcxproj b/PCbuild/pythonw_uwp.vcxproj index 828d0d1ccac21..e21e46a1b722e 100644 --- a/PCbuild/pythonw_uwp.vcxproj +++ b/PCbuild/pythonw_uwp.vcxproj @@ -95,6 +95,7 @@ windowsapp.lib;%(AdditionalDependencies) Windows + 2000000 From webhook-mailer at python.org Tue May 5 13:55:34 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 05 May 2020 17:55:34 -0000 Subject: [Python-checkins] bpo-40521: Disable free lists in subinterpreters (GH-19937) Message-ID: https://github.com/python/cpython/commit/b4b53868d7d6cd13505321d3802fd00865b25e05 commit: b4b53868d7d6cd13505321d3802fd00865b25e05 branch: master author: Victor Stinner committer: GitHub date: 2020-05-05T19:55:29+02:00 summary: bpo-40521: Disable free lists in subinterpreters (GH-19937) When Python is built with experimental isolated interpreters, disable tuple, dict and free free lists. Temporary workaround until these caches are made per-interpreter. Add frame_alloc() and frame_get_builtins() subfunctions to simplify _PyFrame_New_NoTrack(). files: M Objects/dictobject.c M Objects/frameobject.c M Objects/tupleobject.c diff --git a/Objects/dictobject.c b/Objects/dictobject.c index 9c35f3c3f14d0..fa35d16478f63 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -250,16 +250,26 @@ static uint64_t pydict_global_version = 0; #ifndef PyDict_MAXFREELIST #define PyDict_MAXFREELIST 80 #endif + +/* bpo-40521: dict free lists are shared by all interpreters. 
*/ +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +# undef PyDict_MAXFREELIST +# define PyDict_MAXFREELIST 0 +#endif + +#if PyDict_MAXFREELIST > 0 static PyDictObject *free_list[PyDict_MAXFREELIST]; static int numfree = 0; static PyDictKeysObject *keys_free_list[PyDict_MAXFREELIST]; static int numfreekeys = 0; +#endif #include "clinic/dictobject.c.h" void _PyDict_ClearFreeList(void) { +#if PyDict_MAXFREELIST > 0 while (numfree) { PyDictObject *op = free_list[--numfree]; assert(PyDict_CheckExact(op)); @@ -268,14 +278,17 @@ _PyDict_ClearFreeList(void) while (numfreekeys) { PyObject_FREE(keys_free_list[--numfreekeys]); } +#endif } /* Print summary info about the state of the optimized allocator */ void _PyDict_DebugMallocStats(FILE *out) { +#if PyDict_MAXFREELIST > 0 _PyDebugAllocatorStats(out, "free PyDictObject", numfree, sizeof(PyDictObject)); +#endif } @@ -553,10 +566,13 @@ static PyDictKeysObject *new_keys_object(Py_ssize_t size) es = sizeof(Py_ssize_t); } +#if PyDict_MAXFREELIST > 0 if (size == PyDict_MINSIZE && numfreekeys > 0) { dk = keys_free_list[--numfreekeys]; } - else { + else +#endif + { dk = PyObject_MALLOC(sizeof(PyDictKeysObject) + es * size + sizeof(PyDictKeyEntry) * usable); @@ -587,10 +603,12 @@ free_keys_object(PyDictKeysObject *keys) Py_XDECREF(entries[i].me_key); Py_XDECREF(entries[i].me_value); } +#if PyDict_MAXFREELIST > 0 if (keys->dk_size == PyDict_MINSIZE && numfreekeys < PyDict_MAXFREELIST) { keys_free_list[numfreekeys++] = keys; return; } +#endif PyObject_FREE(keys); } @@ -603,13 +621,16 @@ new_dict(PyDictKeysObject *keys, PyObject **values) { PyDictObject *mp; assert(keys != NULL); +#if PyDict_MAXFREELIST > 0 if (numfree) { mp = free_list[--numfree]; assert (mp != NULL); assert (Py_IS_TYPE(mp, &PyDict_Type)); _Py_NewReference((PyObject *)mp); } - else { + else +#endif + { mp = PyObject_GC_New(PyDictObject, &PyDict_Type); if (mp == NULL) { dictkeys_decref(keys); @@ -1258,12 +1279,15 @@ dictresize(PyDictObject *mp, Py_ssize_t minsize) 
#ifdef Py_REF_DEBUG _Py_RefTotal--; #endif +#if PyDict_MAXFREELIST > 0 if (oldkeys->dk_size == PyDict_MINSIZE && numfreekeys < PyDict_MAXFREELIST) { keys_free_list[numfreekeys++] = oldkeys; } - else { + else +#endif + { PyObject_FREE(oldkeys); } } @@ -2005,10 +2029,15 @@ dict_dealloc(PyDictObject *mp) assert(keys->dk_refcnt == 1); dictkeys_decref(keys); } - if (numfree < PyDict_MAXFREELIST && Py_IS_TYPE(mp, &PyDict_Type)) +#if PyDict_MAXFREELIST > 0 + if (numfree < PyDict_MAXFREELIST && Py_IS_TYPE(mp, &PyDict_Type)) { free_list[numfree++] = mp; + } else +#endif + { Py_TYPE(mp)->tp_free((PyObject *)mp); + } Py_TRASHCAN_END } diff --git a/Objects/frameobject.c b/Objects/frameobject.c index 4f5054d32bb01..af32276c98b24 100644 --- a/Objects/frameobject.c +++ b/Objects/frameobject.c @@ -556,11 +556,19 @@ static PyGetSetDef frame_getsetlist[] = { free_list. Else programs creating lots of cyclic trash involving frames could provoke free_list into growing without bound. */ +/* max value for numfree */ +#define PyFrame_MAXFREELIST 200 + +/* bpo-40521: frame free lists are shared by all interpreters. 
*/ +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +# undef PyFrame_MAXFREELIST +# define PyFrame_MAXFREELIST 0 +#endif +#if PyFrame_MAXFREELIST > 0 static PyFrameObject *free_list = NULL; static int numfree = 0; /* number of frames currently in free_list */ -/* max value for numfree */ -#define PyFrame_MAXFREELIST 200 +#endif static void _Py_HOT_FUNCTION frame_dealloc(PyFrameObject *f) @@ -590,15 +598,19 @@ frame_dealloc(PyFrameObject *f) Py_CLEAR(f->f_trace); co = f->f_code; - if (co->co_zombieframe == NULL) + if (co->co_zombieframe == NULL) { co->co_zombieframe = f; + } +#if PyFrame_MAXFREELIST > 0 else if (numfree < PyFrame_MAXFREELIST) { ++numfree; f->f_back = free_list; free_list = f; } - else +#endif + else { PyObject_GC_Del(f); + } Py_DECREF(co); Py_TRASHCAN_SAFE_END(f) @@ -759,98 +771,127 @@ PyTypeObject PyFrame_Type = { _Py_IDENTIFIER(__builtins__); -PyFrameObject* _Py_HOT_FUNCTION -_PyFrame_New_NoTrack(PyThreadState *tstate, PyCodeObject *code, - PyObject *globals, PyObject *locals) +static inline PyFrameObject* +frame_alloc(PyCodeObject *code) { - PyFrameObject *back = tstate->frame; PyFrameObject *f; - PyObject *builtins; - Py_ssize_t i; -#ifdef Py_DEBUG - if (code == NULL || globals == NULL || !PyDict_Check(globals) || - (locals != NULL && !PyMapping_Check(locals))) { - PyErr_BadInternalCall(); - return NULL; + f = code->co_zombieframe; + if (f != NULL) { + code->co_zombieframe = NULL; + _Py_NewReference((PyObject *)f); + assert(f->f_code == code); + return f; } + + Py_ssize_t ncells = PyTuple_GET_SIZE(code->co_cellvars); + Py_ssize_t nfrees = PyTuple_GET_SIZE(code->co_freevars); + Py_ssize_t extras = code->co_stacksize + code->co_nlocals + ncells + nfrees; +#if PyFrame_MAXFREELIST > 0 + if (free_list == NULL) #endif - if (back == NULL || back->f_globals != globals) { - builtins = _PyDict_GetItemIdWithError(globals, &PyId___builtins__); - if (builtins) { - if (PyModule_Check(builtins)) { - builtins = PyModule_GetDict(builtins); - assert(builtins != 
NULL); - } + { + f = PyObject_GC_NewVar(PyFrameObject, &PyFrame_Type, extras); + if (f == NULL) { + return NULL; } - if (builtins == NULL) { - if (PyErr_Occurred()) { + } +#if PyFrame_MAXFREELIST > 0 + else { + assert(numfree > 0); + --numfree; + f = free_list; + free_list = free_list->f_back; + if (Py_SIZE(f) < extras) { + PyFrameObject *new_f = PyObject_GC_Resize(PyFrameObject, f, extras); + if (new_f == NULL) { + PyObject_GC_Del(f); return NULL; } - /* No builtins! Make up a minimal one - Give them 'None', at least. */ - builtins = PyDict_New(); - if (builtins == NULL || - PyDict_SetItemString( - builtins, "None", Py_None) < 0) - return NULL; + f = new_f; } - else - Py_INCREF(builtins); + _Py_NewReference((PyObject *)f); + } +#endif + f->f_code = code; + extras = code->co_nlocals + ncells + nfrees; + f->f_valuestack = f->f_localsplus + extras; + for (Py_ssize_t i=0; if_localsplus[i] = NULL; } - else { + f->f_locals = NULL; + f->f_trace = NULL; + return f; +} + + +static inline PyObject * +frame_get_builtins(PyFrameObject *back, PyObject *globals) +{ + PyObject *builtins; + + if (back != NULL && back->f_globals == globals) { /* If we share the globals, we share the builtins. Save a lookup and a call. 
*/ builtins = back->f_builtins; assert(builtins != NULL); Py_INCREF(builtins); + return builtins; } - if (code->co_zombieframe != NULL) { - f = code->co_zombieframe; - code->co_zombieframe = NULL; - _Py_NewReference((PyObject *)f); - assert(f->f_code == code); + + builtins = _PyDict_GetItemIdWithError(globals, &PyId___builtins__); + if (builtins != NULL && PyModule_Check(builtins)) { + builtins = PyModule_GetDict(builtins); + assert(builtins != NULL); } - else { - Py_ssize_t extras, ncells, nfrees; - ncells = PyTuple_GET_SIZE(code->co_cellvars); - nfrees = PyTuple_GET_SIZE(code->co_freevars); - extras = code->co_stacksize + code->co_nlocals + ncells + - nfrees; - if (free_list == NULL) { - f = PyObject_GC_NewVar(PyFrameObject, &PyFrame_Type, - extras); - if (f == NULL) { - Py_DECREF(builtins); - return NULL; - } - } - else { - assert(numfree > 0); - --numfree; - f = free_list; - free_list = free_list->f_back; - if (Py_SIZE(f) < extras) { - PyFrameObject *new_f = PyObject_GC_Resize(PyFrameObject, f, extras); - if (new_f == NULL) { - PyObject_GC_Del(f); - Py_DECREF(builtins); - return NULL; - } - f = new_f; - } - _Py_NewReference((PyObject *)f); - } + if (builtins != NULL) { + Py_INCREF(builtins); + return builtins; + } + + if (PyErr_Occurred()) { + return NULL; + } + + /* No builtins! Make up a minimal one. + Give them 'None', at least. 
*/ + builtins = PyDict_New(); + if (builtins == NULL) { + return NULL; + } + if (PyDict_SetItemString(builtins, "None", Py_None) < 0) { + Py_DECREF(builtins); + return NULL; + } + return builtins; +} - f->f_code = code; - extras = code->co_nlocals + ncells + nfrees; - f->f_valuestack = f->f_localsplus + extras; - for (i=0; if_localsplus[i] = NULL; - f->f_locals = NULL; - f->f_trace = NULL; + +PyFrameObject* _Py_HOT_FUNCTION +_PyFrame_New_NoTrack(PyThreadState *tstate, PyCodeObject *code, + PyObject *globals, PyObject *locals) +{ +#ifdef Py_DEBUG + if (code == NULL || globals == NULL || !PyDict_Check(globals) || + (locals != NULL && !PyMapping_Check(locals))) { + PyErr_BadInternalCall(); + return NULL; + } +#endif + + PyFrameObject *back = tstate->frame; + PyObject *builtins = frame_get_builtins(back, globals); + if (builtins == NULL) { + return NULL; } + + PyFrameObject *f = frame_alloc(code); + if (f == NULL) { + Py_DECREF(builtins); + return NULL; + } + f->f_stacktop = f->f_valuestack; f->f_builtins = builtins; Py_XINCREF(back); @@ -1142,6 +1183,7 @@ PyFrame_LocalsToFast(PyFrameObject *f, int clear) void _PyFrame_ClearFreeList(void) { +#if PyFrame_MAXFREELIST > 0 while (free_list != NULL) { PyFrameObject *f = free_list; free_list = free_list->f_back; @@ -1149,6 +1191,7 @@ _PyFrame_ClearFreeList(void) --numfree; } assert(numfree == 0); +#endif } void @@ -1161,9 +1204,11 @@ _PyFrame_Fini(void) void _PyFrame_DebugMallocStats(FILE *out) { +#if PyFrame_MAXFREELIST > 0 _PyDebugAllocatorStats(out, "free PyFrameObject", numfree, sizeof(PyFrameObject)); +#endif } diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c index f8648d24f1c87..c0b59c009a2e9 100644 --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -22,6 +22,12 @@ class tuple "PyTupleObject *" "&PyTuple_Type" #define PyTuple_MAXFREELIST 2000 /* Maximum number of tuples of each size to save */ #endif +/* bpo-40521: tuple free lists are shared by all interpreters. 
*/ +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +# undef PyTuple_MAXSAVESIZE +# define PyTuple_MAXSAVESIZE 0 +#endif + #if PyTuple_MAXSAVESIZE > 0 /* Entries 1 up to PyTuple_MAXSAVESIZE are free lists, entry 0 is the empty tuple () of which at most one instance will be allocated. @@ -248,7 +254,9 @@ tupledealloc(PyTupleObject *op) #endif } Py_TYPE(op)->tp_free((PyObject *)op); +#if PyTuple_MAXSAVESIZE > 0 done: +#endif Py_TRASHCAN_END } From webhook-mailer at python.org Tue May 5 13:56:53 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 05 May 2020 17:56:53 -0000 Subject: [Python-checkins] bpo-40522: _PyThreadState_Swap() sets autoTSSkey (GH-19939) Message-ID: https://github.com/python/cpython/commit/e838a9324c1719bb917ca81ede8d766b5cb551f4 commit: e838a9324c1719bb917ca81ede8d766b5cb551f4 branch: master author: Victor Stinner committer: GitHub date: 2020-05-05T19:56:48+02:00 summary: bpo-40522: _PyThreadState_Swap() sets autoTSSkey (GH-19939) In the experimental isolated subinterpreters build mode, _PyThreadState_GET() gets the autoTSSkey variable and _PyThreadState_Swap() sets the autoTSSkey variable. * Add _PyThreadState_GetTSS() * _PyRuntimeState_GetThreadState() and _PyThreadState_GET() return _PyThreadState_GetTSS() * PyEval_SaveThread() sets the autoTSSkey variable to current Python thread state rather than NULL. * eval_frame_handle_pending() doesn't check that _PyThreadState_Swap() result is NULL. * _PyThreadState_Swap() gets the current Python thread state with _PyThreadState_GetTSS() rather than _PyRuntimeGILState_GetThreadState(). * PyGILState_Ensure() no longer checks _PyEval_ThreadsInitialized() since it cannot access the current interpreter. 
files: M Include/internal/pycore_pystate.h M Python/ceval.c M Python/pystate.c diff --git a/Include/internal/pycore_pystate.h b/Include/internal/pycore_pystate.h index c82e8db905188..d96ba31207001 100644 --- a/Include/internal/pycore_pystate.h +++ b/Include/internal/pycore_pystate.h @@ -49,8 +49,18 @@ _Py_ThreadCanHandlePendingCalls(void) /* Variable and macro for in-line access to current thread and interpreter state */ -static inline PyThreadState* _PyRuntimeState_GetThreadState(_PyRuntimeState *runtime) { +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +PyAPI_FUNC(PyThreadState*) _PyThreadState_GetTSS(void); +#endif + +static inline PyThreadState* +_PyRuntimeState_GetThreadState(_PyRuntimeState *runtime) +{ +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + return _PyThreadState_GetTSS(); +#else return (PyThreadState*)_Py_atomic_load_relaxed(&runtime->gilstate.tstate_current); +#endif } /* Get the current Python thread state. @@ -62,8 +72,14 @@ static inline PyThreadState* _PyRuntimeState_GetThreadState(_PyRuntimeState *run The caller must hold the GIL. See also PyThreadState_Get() and PyThreadState_GET(). 
*/ -static inline PyThreadState *_PyThreadState_GET(void) { +static inline PyThreadState* +_PyThreadState_GET(void) +{ +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + return _PyThreadState_GetTSS(); +#else return _PyRuntimeState_GetThreadState(&_PyRuntime); +#endif } /* Redefine PyThreadState_GET() as an alias to _PyThreadState_GET() */ diff --git a/Python/ceval.c b/Python/ceval.c index 0c08a76f7d113..b5854d3446463 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -380,9 +380,13 @@ PyEval_AcquireThread(PyThreadState *tstate) take_gil(tstate); struct _gilstate_runtime_state *gilstate = &tstate->interp->runtime->gilstate; +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + (void)_PyThreadState_Swap(gilstate, tstate); +#else if (_PyThreadState_Swap(gilstate, tstate) != NULL) { Py_FatalError("non-NULL old thread state"); } +#endif } void @@ -443,7 +447,12 @@ PyThreadState * PyEval_SaveThread(void) { _PyRuntimeState *runtime = &_PyRuntime; +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + PyThreadState *old_tstate = _PyThreadState_GET(); + PyThreadState *tstate = _PyThreadState_Swap(&runtime->gilstate, old_tstate); +#else PyThreadState *tstate = _PyThreadState_Swap(&runtime->gilstate, NULL); +#endif ensure_tstate_not_null(__func__, tstate); struct _ceval_runtime_state *ceval = &runtime->ceval; @@ -866,9 +875,13 @@ eval_frame_handle_pending(PyThreadState *tstate) take_gil(tstate); +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + (void)_PyThreadState_Swap(&runtime->gilstate, tstate); +#else if (_PyThreadState_Swap(&runtime->gilstate, tstate) != NULL) { Py_FatalError("orphan tstate"); } +#endif } /* Check for asynchronous exception. 
*/ diff --git a/Python/pystate.c b/Python/pystate.c index dd95750027241..119fe31a84ba1 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -956,6 +956,14 @@ _PyThreadState_DeleteExcept(_PyRuntimeState *runtime, PyThreadState *tstate) } +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +PyThreadState* +_PyThreadState_GetTSS(void) { + return PyThread_tss_get(&_PyRuntime.gilstate.autoTSSkey); +} +#endif + + PyThreadState * _PyThreadState_UncheckedGet(void) { @@ -975,7 +983,11 @@ PyThreadState_Get(void) PyThreadState * _PyThreadState_Swap(struct _gilstate_runtime_state *gilstate, PyThreadState *newts) { +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + PyThreadState *oldts = _PyThreadState_GetTSS(); +#else PyThreadState *oldts = _PyRuntimeGILState_GetThreadState(gilstate); +#endif _PyRuntimeGILState_SetThreadState(gilstate, newts); /* It should not be possible for more than one thread state @@ -993,6 +1005,9 @@ _PyThreadState_Swap(struct _gilstate_runtime_state *gilstate, PyThreadState *new Py_FatalError("Invalid thread state for this thread"); errno = err; } +#endif +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + PyThread_tss_set(&gilstate->autoTSSkey, newts); #endif return oldts; } @@ -1363,7 +1378,9 @@ PyGILState_Ensure(void) /* Ensure that _PyEval_InitThreads() and _PyGILState_Init() have been called by Py_Initialize() */ +#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS assert(_PyEval_ThreadsInitialized(runtime)); +#endif assert(gilstate->autoInterpreterState); PyThreadState *tcur = (PyThreadState *)PyThread_tss_get(&gilstate->autoTSSkey); From webhook-mailer at python.org Tue May 5 14:03:25 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 05 May 2020 18:03:25 -0000 Subject: [Python-checkins] bpo-40458: Increase reserved stack space to prevent overflow crash on Windows (GH-19845) Message-ID: https://github.com/python/cpython/commit/a6a116c1b964b3d1fdff0f533861ed2a2227de1f commit: a6a116c1b964b3d1fdff0f533861ed2a2227de1f branch: 3.8 author: Miss 
Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-05T11:03:21-07:00 summary: bpo-40458: Increase reserved stack space to prevent overflow crash on Windows (GH-19845) (cherry picked from commit ac4bf424119d1300f57929120968e216a85d3a25) Co-authored-by: Steve Dower files: A Misc/NEWS.d/next/Windows/2020-05-01-20-57-57.bpo-40458.Eb0ueI.rst M PCbuild/python_uwp.vcxproj M PCbuild/pythonw_uwp.vcxproj diff --git a/Misc/NEWS.d/next/Windows/2020-05-01-20-57-57.bpo-40458.Eb0ueI.rst b/Misc/NEWS.d/next/Windows/2020-05-01-20-57-57.bpo-40458.Eb0ueI.rst new file mode 100644 index 0000000000000..4dc1ff480df87 --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2020-05-01-20-57-57.bpo-40458.Eb0ueI.rst @@ -0,0 +1 @@ +Increase reserved stack space to prevent overflow crash on Windows. diff --git a/PCbuild/python_uwp.vcxproj b/PCbuild/python_uwp.vcxproj index 5ff120a0da331..fb27e9e71222e 100644 --- a/PCbuild/python_uwp.vcxproj +++ b/PCbuild/python_uwp.vcxproj @@ -95,6 +95,7 @@ windowsapp.lib;%(AdditionalDependencies) Console + 2000000 diff --git a/PCbuild/pythonw_uwp.vcxproj b/PCbuild/pythonw_uwp.vcxproj index 828d0d1ccac21..e21e46a1b722e 100644 --- a/PCbuild/pythonw_uwp.vcxproj +++ b/PCbuild/pythonw_uwp.vcxproj @@ -95,6 +95,7 @@ windowsapp.lib;%(AdditionalDependencies) Windows + 2000000 From webhook-mailer at python.org Tue May 5 14:16:46 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 05 May 2020 18:16:46 -0000 Subject: [Python-checkins] bpo-40513: new_interpreter() init GIL earlier (GH-19942) Message-ID: https://github.com/python/cpython/commit/0dd5e7a718997da2026ed64fe054dc36cae4fee7 commit: 0dd5e7a718997da2026ed64fe054dc36cae4fee7 branch: master author: Victor Stinner committer: GitHub date: 2020-05-05T20:16:37+02:00 summary: bpo-40513: new_interpreter() init GIL earlier (GH-19942) Fix also code to handle init_interp_main() failure. 
files: M Python/pylifecycle.c diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index 5726a559cfcb7..2149d8928d596 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -1575,19 +1575,19 @@ new_interpreter(PyThreadState **tstate_p, int isolated_subinterpreter) } interp->config._isolated_interpreter = isolated_subinterpreter; - status = pycore_interp_init(tstate); + status = init_interp_create_gil(tstate); if (_PyStatus_EXCEPTION(status)) { goto error; } - status = init_interp_main(tstate); + status = pycore_interp_init(tstate); if (_PyStatus_EXCEPTION(status)) { goto error; } - status = init_interp_create_gil(tstate); + status = init_interp_main(tstate); if (_PyStatus_EXCEPTION(status)) { - return status; + goto error; } *tstate_p = tstate; From webhook-mailer at python.org Tue May 5 14:27:55 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 05 May 2020 18:27:55 -0000 Subject: [Python-checkins] bpo-40513: Per-interpreter GIL (GH-19943) Message-ID: https://github.com/python/cpython/commit/7be4e350aadf93c4be5c97b7291d0db2b6bc1dc4 commit: 7be4e350aadf93c4be5c97b7291d0db2b6bc1dc4 branch: master author: Victor Stinner committer: GitHub date: 2020-05-05T20:27:47+02:00 summary: bpo-40513: Per-interpreter GIL (GH-19943) In the experimental isolated subinterpreters build mode, the GIL is now per-interpreter. Move gil from _PyRuntimeState.ceval to PyInterpreterState.ceval. new_interpreter() always get the config from the main interpreter. 
files: M Include/internal/pycore_ceval.h M Include/internal/pycore_interp.h M Include/internal/pycore_runtime.h M Python/ceval.c M Python/ceval_gil.h M Python/pylifecycle.c diff --git a/Include/internal/pycore_ceval.h b/Include/internal/pycore_ceval.h index 18c8f027af16e..368990099089f 100644 --- a/Include/internal/pycore_ceval.h +++ b/Include/internal/pycore_ceval.h @@ -50,7 +50,11 @@ extern PyObject *_PyEval_EvalCode( PyObject *kwdefs, PyObject *closure, PyObject *name, PyObject *qualname); +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +extern int _PyEval_ThreadsInitialized(PyInterpreterState *interp); +#else extern int _PyEval_ThreadsInitialized(struct pyruntimestate *runtime); +#endif extern PyStatus _PyEval_InitGIL(PyThreadState *tstate); extern void _PyEval_FiniGIL(PyThreadState *tstate); diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index 5bf8998e67320..26e7a473a12dc 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -46,6 +46,9 @@ struct _ceval_state { /* Request for dropping the GIL */ _Py_atomic_int gil_drop_request; struct _pending_calls pending; +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + struct _gil_runtime_state gil; +#endif }; diff --git a/Include/internal/pycore_runtime.h b/Include/internal/pycore_runtime.h index 34eb492b9f254..ebdc12b23a9ca 100644 --- a/Include/internal/pycore_runtime.h +++ b/Include/internal/pycore_runtime.h @@ -19,7 +19,9 @@ struct _ceval_runtime_state { the main thread of the main interpreter can handle signals: see _Py_ThreadCanHandleSignals(). 
*/ _Py_atomic_int signals_pending; +#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS struct _gil_runtime_state gil; +#endif }; /* GIL state */ diff --git a/Python/ceval.c b/Python/ceval.c index b5854d3446463..6435bd05446aa 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -250,6 +250,21 @@ ensure_tstate_not_null(const char *func, PyThreadState *tstate) } +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +int +_PyEval_ThreadsInitialized(PyInterpreterState *interp) +{ + return gil_created(&interp->ceval.gil); +} + +int +PyEval_ThreadsInitialized(void) +{ + // Fatal error if there is no current interpreter + PyInterpreterState *interp = PyInterpreterState_Get(); + return _PyEval_ThreadsInitialized(interp); +} +#else int _PyEval_ThreadsInitialized(_PyRuntimeState *runtime) { @@ -262,18 +277,25 @@ PyEval_ThreadsInitialized(void) _PyRuntimeState *runtime = &_PyRuntime; return _PyEval_ThreadsInitialized(runtime); } +#endif PyStatus _PyEval_InitGIL(PyThreadState *tstate) { +#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS if (!_Py_IsMainInterpreter(tstate)) { /* Currently, the GIL is shared by all interpreters, and only the main interpreter is responsible to create and destroy it. */ return _PyStatus_OK(); } +#endif +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + struct _gil_runtime_state *gil = &tstate->interp->ceval.gil; +#else struct _gil_runtime_state *gil = &tstate->interp->runtime->ceval.gil; +#endif assert(!gil_created(gil)); PyThread_init_thread(); @@ -288,14 +310,20 @@ _PyEval_InitGIL(PyThreadState *tstate) void _PyEval_FiniGIL(PyThreadState *tstate) { +#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS if (!_Py_IsMainInterpreter(tstate)) { /* Currently, the GIL is shared by all interpreters, and only the main interpreter is responsible to create and destroy it. 
*/ return; } +#endif +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + struct _gil_runtime_state *gil = &tstate->interp->ceval.gil; +#else struct _gil_runtime_state *gil = &tstate->interp->runtime->ceval.gil; +#endif if (!gil_created(gil)) { /* First Py_InitializeFromConfig() call: the GIL doesn't exist yet: do nothing. */ @@ -413,13 +441,18 @@ PyEval_ReleaseThread(PyThreadState *tstate) void _PyEval_ReInitThreads(_PyRuntimeState *runtime) { + PyThreadState *tstate = _PyRuntimeState_GetThreadState(runtime); + ensure_tstate_not_null(__func__, tstate); + +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + struct _gil_runtime_state *gil = &tstate->interp->ceval.gil; +#else struct _gil_runtime_state *gil = &runtime->ceval.gil; +#endif if (!gil_created(gil)) { return; } recreate_gil(gil); - PyThreadState *tstate = _PyRuntimeState_GetThreadState(runtime); - ensure_tstate_not_null(__func__, tstate); take_gil(tstate); @@ -457,7 +490,11 @@ PyEval_SaveThread(void) struct _ceval_runtime_state *ceval = &runtime->ceval; struct _ceval_state *ceval2 = &tstate->interp->ceval; +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + assert(gil_created(&ceval2->gil)); +#else assert(gil_created(&ceval->gil)); +#endif drop_gil(ceval, ceval2, tstate); return tstate; } @@ -716,7 +753,9 @@ void _PyEval_InitRuntimeState(struct _ceval_runtime_state *ceval) { _Py_CheckRecursionLimit = Py_DEFAULT_RECURSION_LIMIT; +#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS _gil_initialize(&ceval->gil); +#endif } int @@ -731,6 +770,11 @@ _PyEval_InitState(struct _ceval_state *ceval) if (pending->lock == NULL) { return -1; } + +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + _gil_initialize(&ceval->gil); +#endif + return 0; } diff --git a/Python/ceval_gil.h b/Python/ceval_gil.h index f25f810073294..56944b89237fb 100644 --- a/Python/ceval_gil.h +++ b/Python/ceval_gil.h @@ -144,7 +144,11 @@ static void drop_gil(struct _ceval_runtime_state *ceval, struct _ceval_state *ceval2, PyThreadState *tstate) { +#ifdef 
EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + struct _gil_runtime_state *gil = &ceval2->gil; +#else struct _gil_runtime_state *gil = &ceval->gil; +#endif if (!_Py_atomic_load_relaxed(&gil->locked)) { Py_FatalError("drop_gil: GIL is not locked"); } @@ -228,7 +232,11 @@ take_gil(PyThreadState *tstate) PyInterpreterState *interp = tstate->interp; struct _ceval_runtime_state *ceval = &interp->runtime->ceval; struct _ceval_state *ceval2 = &interp->ceval; +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + struct _gil_runtime_state *gil = &ceval2->gil; +#else struct _gil_runtime_state *gil = &ceval->gil; +#endif /* Check that _PyEval_InitThreads() was called to create the lock */ assert(gil_created(gil)); @@ -320,10 +328,22 @@ take_gil(PyThreadState *tstate) void _PyEval_SetSwitchInterval(unsigned long microseconds) { - _PyRuntime.ceval.gil.interval = microseconds; +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + PyInterpreterState *interp = PyInterpreterState_Get(); + struct _gil_runtime_state *gil = &interp->ceval.gil; +#else + struct _gil_runtime_state *gil = &_PyRuntime.ceval.gil; +#endif + gil->interval = microseconds; } unsigned long _PyEval_GetSwitchInterval() { - return _PyRuntime.ceval.gil.interval; +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + PyInterpreterState *interp = PyInterpreterState_Get(); + struct _gil_runtime_state *gil = &interp->ceval.gil; +#else + struct _gil_runtime_state *gil = &_PyRuntime.ceval.gil; +#endif + return gil->interval; } diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index 2149d8928d596..da66a82ada70a 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -1561,9 +1561,13 @@ new_interpreter(PyThreadState **tstate_p, int isolated_subinterpreter) /* Copy the current interpreter config into the new interpreter */ const PyConfig *config; +#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS if (save_tstate != NULL) { config = _PyInterpreterState_GetConfig(save_tstate->interp); - } else { + } + else +#endif + { /* No current thread 
state, copy from the main interpreter */ PyInterpreterState *main_interp = PyInterpreterState_Main(); config = _PyInterpreterState_GetConfig(main_interp); From webhook-mailer at python.org Tue May 5 14:33:11 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 05 May 2020 18:33:11 -0000 Subject: [Python-checkins] bpo-40513: _xxsubinterpreters.run_string() releases the GIL (GH-19944) Message-ID: https://github.com/python/cpython/commit/fb2c7c4afbab0514352ab0246b0c0cc85d1bba53 commit: fb2c7c4afbab0514352ab0246b0c0cc85d1bba53 branch: master author: Victor Stinner committer: GitHub date: 2020-05-05T20:33:06+02:00 summary: bpo-40513: _xxsubinterpreters.run_string() releases the GIL (GH-19944) In the experimental isolated subinterpreters build mode, _xxsubinterpreters.run_string() now releases the GIL. files: M Modules/_xxsubinterpretersmodule.c diff --git a/Modules/_xxsubinterpretersmodule.c b/Modules/_xxsubinterpretersmodule.c index de11c090870f9..8a6fce9e0b4bd 100644 --- a/Modules/_xxsubinterpretersmodule.c +++ b/Modules/_xxsubinterpretersmodule.c @@ -1939,6 +1939,20 @@ _run_script_in_interpreter(PyInterpreterState *interp, const char *codestr, return -1; } +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + // Switch to interpreter. + PyThreadState *new_tstate = PyInterpreterState_ThreadHead(interp); + PyThreadState *save1 = PyEval_SaveThread(); + + (void)PyThreadState_Swap(new_tstate); + + // Run the script. + _sharedexception *exc = NULL; + int result = _run_script(interp, codestr, shared, &exc); + + // Switch back. + PyEval_RestoreThread(save1); +#else // Switch to interpreter. PyThreadState *save_tstate = NULL; if (interp != PyInterpreterState_Get()) { @@ -1956,6 +1970,7 @@ _run_script_in_interpreter(PyInterpreterState *interp, const char *codestr, if (save_tstate != NULL) { PyThreadState_Swap(save_tstate); } +#endif // Propagate any exception out to the caller. 
if (exc != NULL) { From webhook-mailer at python.org Tue May 5 15:41:05 2020 From: webhook-mailer at python.org (Curtis Bucher) Date: Tue, 05 May 2020 19:41:05 -0000 Subject: [Python-checkins] bpo-40355: Improve error messages in ast.literal_eval with malformed Dict nodes (GH-19868) Message-ID: https://github.com/python/cpython/commit/c21c51235aa8061da6b0593d6f857f42fd92fd8b commit: c21c51235aa8061da6b0593d6f857f42fd92fd8b branch: master author: Curtis Bucher committer: GitHub date: 2020-05-05T20:40:56+01:00 summary: bpo-40355: Improve error messages in ast.literal_eval with malformed Dict nodes (GH-19868) Co-authored-by: Pablo Galindo files: A Misc/NEWS.d/next/Library/2020-05-02-14-24-48.bpo-40355.xTujaB.rst M Lib/ast.py M Lib/test/test_ast.py diff --git a/Lib/ast.py b/Lib/ast.py index 5c68c4a66e1dd..7a43581c0e6ce 100644 --- a/Lib/ast.py +++ b/Lib/ast.py @@ -62,11 +62,12 @@ def literal_eval(node_or_string): node_or_string = parse(node_or_string, mode='eval') if isinstance(node_or_string, Expression): node_or_string = node_or_string.body + def _raise_malformed_node(node): + raise ValueError(f'malformed node or string: {node!r}') def _convert_num(node): - if isinstance(node, Constant): - if type(node.value) in (int, float, complex): - return node.value - raise ValueError('malformed node or string: ' + repr(node)) + if not isinstance(node, Constant) or type(node.value) not in (int, float, complex): + _raise_malformed_node(node) + return node.value def _convert_signed_num(node): if isinstance(node, UnaryOp) and isinstance(node.op, (UAdd, USub)): operand = _convert_num(node.operand) @@ -88,6 +89,8 @@ def _convert(node): node.func.id == 'set' and node.args == node.keywords == []): return set() elif isinstance(node, Dict): + if len(node.keys) != len(node.values): + _raise_malformed_node(node) return dict(zip(map(_convert, node.keys), map(_convert, node.values))) elif isinstance(node, BinOp) and isinstance(node.op, (Add, Sub)): diff --git a/Lib/test/test_ast.py 
b/Lib/test/test_ast.py index 9063b3d2d7b74..a8a13fdcd7426 100644 --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast.py @@ -965,6 +965,12 @@ def test_literal_eval_complex(self): self.assertRaises(ValueError, ast.literal_eval, '3+(0+6j)') self.assertRaises(ValueError, ast.literal_eval, '-(3+6j)') + def test_literal_eval_malformed_dict_nodes(self): + malformed = ast.Dict(keys=[ast.Constant(1), ast.Constant(2)], values=[ast.Constant(3)]) + self.assertRaises(ValueError, ast.literal_eval, malformed) + malformed = ast.Dict(keys=[ast.Constant(1)], values=[ast.Constant(2), ast.Constant(3)]) + self.assertRaises(ValueError, ast.literal_eval, malformed) + def test_bad_integer(self): # issue13436: Bad error message with invalid numeric values body = [ast.ImportFrom(module='time', diff --git a/Misc/NEWS.d/next/Library/2020-05-02-14-24-48.bpo-40355.xTujaB.rst b/Misc/NEWS.d/next/Library/2020-05-02-14-24-48.bpo-40355.xTujaB.rst new file mode 100644 index 0000000000000..81f9e937a2bff --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-02-14-24-48.bpo-40355.xTujaB.rst @@ -0,0 +1,2 @@ +Improve error reporting in :func:`ast.literal_eval` in the presence of malformed :class:`ast.Dict` +nodes instead of silently ignoring any non-conforming elements. Patch by Curtis Bucher. 
From webhook-mailer at python.org Tue May 5 16:01:05 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 05 May 2020 20:01:05 -0000 Subject: [Python-checkins] bpo-40355: Improve error messages in ast.literal_eval with malformed Dict nodes (GH-19868) Message-ID: https://github.com/python/cpython/commit/2a3b876b0286b22a9058510d9e51dc4d60eeb89a commit: 2a3b876b0286b22a9058510d9e51dc4d60eeb89a branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-05T13:00:57-07:00 summary: bpo-40355: Improve error messages in ast.literal_eval with malformed Dict nodes (GH-19868) Co-authored-by: Pablo Galindo (cherry picked from commit c21c51235aa8061da6b0593d6f857f42fd92fd8b) Co-authored-by: Curtis Bucher files: A Misc/NEWS.d/next/Library/2020-05-02-14-24-48.bpo-40355.xTujaB.rst M Lib/ast.py M Lib/test/test_ast.py diff --git a/Lib/ast.py b/Lib/ast.py index 157a8332ebcc5..0c88bcf4c821e 100644 --- a/Lib/ast.py +++ b/Lib/ast.py @@ -59,11 +59,12 @@ def literal_eval(node_or_string): node_or_string = parse(node_or_string, mode='eval') if isinstance(node_or_string, Expression): node_or_string = node_or_string.body + def _raise_malformed_node(node): + raise ValueError(f'malformed node or string: {node!r}') def _convert_num(node): - if isinstance(node, Constant): - if type(node.value) in (int, float, complex): - return node.value - raise ValueError('malformed node or string: ' + repr(node)) + if not isinstance(node, Constant) or type(node.value) not in (int, float, complex): + _raise_malformed_node(node) + return node.value def _convert_signed_num(node): if isinstance(node, UnaryOp) and isinstance(node.op, (UAdd, USub)): operand = _convert_num(node.operand) @@ -82,6 +83,8 @@ def _convert(node): elif isinstance(node, Set): return set(map(_convert, node.elts)) elif isinstance(node, Dict): + if len(node.keys) != len(node.values): + _raise_malformed_node(node) return dict(zip(map(_convert, node.keys), 
map(_convert, node.values))) elif isinstance(node, BinOp) and isinstance(node.op, (Add, Sub)): diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py index 3e8a39dc41047..8887558ce4c31 100644 --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast.py @@ -869,6 +869,12 @@ def test_literal_eval_complex(self): self.assertRaises(ValueError, ast.literal_eval, '3+(0+6j)') self.assertRaises(ValueError, ast.literal_eval, '-(3+6j)') + def test_literal_eval_malformed_dict_nodes(self): + malformed = ast.Dict(keys=[ast.Constant(1), ast.Constant(2)], values=[ast.Constant(3)]) + self.assertRaises(ValueError, ast.literal_eval, malformed) + malformed = ast.Dict(keys=[ast.Constant(1)], values=[ast.Constant(2), ast.Constant(3)]) + self.assertRaises(ValueError, ast.literal_eval, malformed) + def test_bad_integer(self): # issue13436: Bad error message with invalid numeric values body = [ast.ImportFrom(module='time', diff --git a/Misc/NEWS.d/next/Library/2020-05-02-14-24-48.bpo-40355.xTujaB.rst b/Misc/NEWS.d/next/Library/2020-05-02-14-24-48.bpo-40355.xTujaB.rst new file mode 100644 index 0000000000000..81f9e937a2bff --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-02-14-24-48.bpo-40355.xTujaB.rst @@ -0,0 +1,2 @@ +Improve error reporting in :func:`ast.literal_eval` in the presence of malformed :class:`ast.Dict` +nodes instead of silently ignoring any non-conforming elements. Patch by Curtis Bucher. 
From webhook-mailer at python.org Tue May 5 17:14:40 2020 From: webhook-mailer at python.org (Dennis Sweeney) Date: Tue, 05 May 2020 21:14:40 -0000 Subject: [Python-checkins] bpo-40504: Allow weakrefs to lru_cache objects (GH-19938) Message-ID: https://github.com/python/cpython/commit/1253c3ef70ea5632d32ae19579a14152db0d45c1 commit: 1253c3ef70ea5632d32ae19579a14152db0d45c1 branch: master author: Dennis Sweeney <36520290+sweeneyde at users.noreply.github.com> committer: GitHub date: 2020-05-05T14:14:32-07:00 summary: bpo-40504: Allow weakrefs to lru_cache objects (GH-19938) files: A Misc/NEWS.d/next/Library/2020-05-05-17-12-47.bpo-40504.EX6wPn.rst M Lib/test/test_functools.py M Modules/_functoolsmodule.c diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py index 9503f4086b1cb..b3893a15566fa 100644 --- a/Lib/test/test_functools.py +++ b/Lib/test/test_functools.py @@ -14,6 +14,8 @@ import unittest import unittest.mock import os +import weakref +import gc from weakref import proxy import contextlib @@ -1938,6 +1940,35 @@ def f(): return 1 self.assertEqual(f.cache_parameters(), {'maxsize': 1000, "typed": True}) + def test_lru_cache_weakrefable(self): + @self.module.lru_cache + def test_function(x): + return x + + class A: + @self.module.lru_cache + def test_method(self, x): + return (self, x) + + @staticmethod + @self.module.lru_cache + def test_staticmethod(x): + return (self, x) + + refs = [weakref.ref(test_function), + weakref.ref(A.test_method), + weakref.ref(A.test_staticmethod)] + + for ref in refs: + self.assertIsNotNone(ref()) + + del A + del test_function + gc.collect() + + for ref in refs: + self.assertIsNone(ref()) + @py_functools.lru_cache() def py_cached_func(x, y): diff --git a/Misc/NEWS.d/next/Library/2020-05-05-17-12-47.bpo-40504.EX6wPn.rst b/Misc/NEWS.d/next/Library/2020-05-05-17-12-47.bpo-40504.EX6wPn.rst new file mode 100644 index 0000000000000..261a49e432928 --- /dev/null +++ 
b/Misc/NEWS.d/next/Library/2020-05-05-17-12-47.bpo-40504.EX6wPn.rst @@ -0,0 +1 @@ +:func:`functools.lru_cache` objects can now be the targets of weakrefs. \ No newline at end of file diff --git a/Modules/_functoolsmodule.c b/Modules/_functoolsmodule.c index fd4b4c268cc97..d158d3bae157b 100644 --- a/Modules/_functoolsmodule.c +++ b/Modules/_functoolsmodule.c @@ -783,6 +783,7 @@ typedef struct lru_cache_object { Py_ssize_t misses; PyObject *cache_info_type; PyObject *dict; + PyObject *weakreflist; } lru_cache_object; static PyTypeObject lru_cache_type; @@ -1196,6 +1197,7 @@ lru_cache_new(PyTypeObject *type, PyObject *args, PyObject *kw) Py_INCREF(cache_info_type); obj->cache_info_type = cache_info_type; obj->dict = NULL; + obj->weakreflist = NULL; return (PyObject *)obj; } @@ -1227,6 +1229,8 @@ lru_cache_dealloc(lru_cache_object *obj) lru_list_elem *list; /* bpo-31095: UnTrack is needed before calling any callbacks */ PyObject_GC_UnTrack(obj); + if (obj->weakreflist != NULL) + PyObject_ClearWeakRefs((PyObject*)obj); list = lru_cache_unlink_list(obj); Py_XDECREF(obj->cache); @@ -1384,7 +1388,8 @@ static PyTypeObject lru_cache_type = { (traverseproc)lru_cache_tp_traverse,/* tp_traverse */ (inquiry)lru_cache_tp_clear, /* tp_clear */ 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ + offsetof(lru_cache_object, weakreflist), + /* tp_weaklistoffset */ 0, /* tp_iter */ 0, /* tp_iternext */ lru_cache_methods, /* tp_methods */ From webhook-mailer at python.org Tue May 5 17:58:29 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Tue, 05 May 2020 21:58:29 -0000 Subject: [Python-checkins] bpo-40523: Add pass-throughs for hash() and reversed() to weakref.proxy objects (GH-19946) Message-ID: https://github.com/python/cpython/commit/96074de573f82fc66a2bd73c36905141a3f1d5c1 commit: 96074de573f82fc66a2bd73c36905141a3f1d5c1 branch: master author: Pablo Galindo committer: GitHub date: 2020-05-05T22:58:19+01:00 summary: bpo-40523: Add pass-throughs for hash() and 
reversed() to weakref.proxy objects (GH-19946) files: A Misc/NEWS.d/next/Core and Builtins/2020-05-05-20-36-15.bpo-40523.hKZVTB.rst M Lib/test/test_weakref.py M Objects/weakrefobject.c diff --git a/Lib/test/test_weakref.py b/Lib/test/test_weakref.py index 563507fee3d7e..56a42f055d0b5 100644 --- a/Lib/test/test_weakref.py +++ b/Lib/test/test_weakref.py @@ -411,6 +411,26 @@ def __iter__(self): # can be killed in the middle of the call "blech" in p + def test_proxy_reversed(self): + class MyObj: + def __len__(self): + return 3 + def __reversed__(self): + return iter('cba') + + obj = MyObj() + self.assertEqual("".join(reversed(weakref.proxy(obj))), "cba") + + def test_proxy_hash(self): + cool_hash = 299_792_458 + + class MyObj: + def __hash__(self): + return cool_hash + + obj = MyObj() + self.assertEqual(hash(weakref.proxy(obj)), cool_hash) + def test_getweakrefcount(self): o = C() ref1 = weakref.ref(o) diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-05-20-36-15.bpo-40523.hKZVTB.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-05-20-36-15.bpo-40523.hKZVTB.rst new file mode 100644 index 0000000000000..14f05be59a1ed --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-05-20-36-15.bpo-40523.hKZVTB.rst @@ -0,0 +1,2 @@ +Add pass-throughs for :func:`hash` and :func:`reversed` to +:class:`weakref.proxy` objects. Patch by Pablo Galindo. 
diff --git a/Objects/weakrefobject.c b/Objects/weakrefobject.c index 9640d93aaf2da..313e8abab5a25 100644 --- a/Objects/weakrefobject.c +++ b/Objects/weakrefobject.c @@ -665,10 +665,12 @@ proxy_iternext(PyWeakReference *proxy) WRAP_METHOD(proxy_bytes, __bytes__) +WRAP_METHOD(proxy_reversed, __reversed__) static PyMethodDef proxy_methods[] = { {"__bytes__", proxy_bytes, METH_NOARGS}, + {"__reversed__", proxy_reversed, METH_NOARGS}, {NULL, NULL} }; @@ -730,6 +732,21 @@ static PyMappingMethods proxy_as_mapping = { }; +static Py_hash_t +proxy_hash(PyObject *self) +{ + PyWeakReference *proxy = (PyWeakReference *)self; + if (!proxy_checkref(proxy)) { + return -1; + } + PyObject *obj = PyWeakref_GET_OBJECT(proxy); + Py_INCREF(obj); + Py_hash_t res = PyObject_Hash(obj); + Py_DECREF(obj); + return res; +} + + PyTypeObject _PyWeakref_ProxyType = { PyVarObject_HEAD_INIT(&PyType_Type, 0) @@ -746,7 +763,7 @@ _PyWeakref_ProxyType = { &proxy_as_number, /* tp_as_number */ &proxy_as_sequence, /* tp_as_sequence */ &proxy_as_mapping, /* tp_as_mapping */ - 0, /* tp_hash */ + proxy_hash, /* tp_hash */ 0, /* tp_call */ proxy_str, /* tp_str */ proxy_getattr, /* tp_getattro */ From webhook-mailer at python.org Tue May 5 20:32:22 2020 From: webhook-mailer at python.org (Robert Rouhani) Date: Wed, 06 May 2020 00:32:22 -0000 Subject: [Python-checkins] [3.8] bpo-40417: Fix deprecation warning in PyImport_ReloadModule (GH-19750) (GH-19934) Message-ID: https://github.com/python/cpython/commit/a32587a60da5939a3932bb30432d2bdd3d6203d4 commit: a32587a60da5939a3932bb30432d2bdd3d6203d4 branch: 3.8 author: Robert Rouhani committer: GitHub date: 2020-05-05T17:32:14-07:00 summary: [3.8] bpo-40417: Fix deprecation warning in PyImport_ReloadModule (GH-19750) (GH-19934) Automerge-Triggered-By: @brettcannon. 
(cherry picked from commit f40bd466bf14029e2687e36e965875adf9d4be1a) Co-authored-by: Robert Rouhani files: A Misc/NEWS.d/next/Core and Builtins/2020-05-01-19-04-52.bpo-40417.Sti2lJ.rst M Python/import.c diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-01-19-04-52.bpo-40417.Sti2lJ.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-01-19-04-52.bpo-40417.Sti2lJ.rst new file mode 100644 index 0000000000000..932e853a8933d --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-01-19-04-52.bpo-40417.Sti2lJ.rst @@ -0,0 +1 @@ +Fix imp module deprecation warning when PyImport_ReloadModule is called. Patch by Robert Rouhani. diff --git a/Python/import.c b/Python/import.c index b4074d1dfc3fa..b73fe2f93e26c 100644 --- a/Python/import.c +++ b/Python/import.c @@ -1908,23 +1908,23 @@ PyImport_ImportModuleLevel(const char *name, PyObject *globals, PyObject *locals PyObject * PyImport_ReloadModule(PyObject *m) { - _Py_IDENTIFIER(imp); + _Py_IDENTIFIER(importlib); _Py_IDENTIFIER(reload); PyObject *reloaded_module = NULL; - PyObject *imp = _PyImport_GetModuleId(&PyId_imp); - if (imp == NULL) { + PyObject *importlib = _PyImport_GetModuleId(&PyId_importlib); + if (importlib == NULL) { if (PyErr_Occurred()) { return NULL; } - imp = PyImport_ImportModule("imp"); - if (imp == NULL) { + importlib = PyImport_ImportModule("importlib"); + if (importlib == NULL) { return NULL; } } - reloaded_module = _PyObject_CallMethodIdObjArgs(imp, &PyId_reload, m, NULL); - Py_DECREF(imp); + reloaded_module = _PyObject_CallMethodIdObjArgs(importlib, &PyId_reload, m, NULL); + Py_DECREF(importlib); return reloaded_module; } From webhook-mailer at python.org Tue May 5 20:49:37 2020 From: webhook-mailer at python.org (Robert Rouhani) Date: Wed, 06 May 2020 00:49:37 -0000 Subject: [Python-checkins] [3.7] bpo-40417: Fix deprecation warning in PyImport_ReloadModule (GH-19750) (GH-19935) Message-ID: https://github.com/python/cpython/commit/d64fd617e02346ecbcba9559f227936e08e89602 commit: 
d64fd617e02346ecbcba9559f227936e08e89602 branch: 3.7 author: Robert Rouhani committer: GitHub date: 2020-05-05T17:49:29-07:00 summary: [3.7] bpo-40417: Fix deprecation warning in PyImport_ReloadModule (GH-19750) (GH-19935) Use importlib instead of imp. Automerge-Triggered-By: @brettcannon. (cherry picked from commit f40bd46) Co-authored-by: Robert Rouhani robert.rouhani at gmail.com files: A Misc/NEWS.d/next/Core and Builtins/2020-05-01-19-04-52.bpo-40417.Sti2lJ.rst M Python/import.c diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-01-19-04-52.bpo-40417.Sti2lJ.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-01-19-04-52.bpo-40417.Sti2lJ.rst new file mode 100644 index 0000000000000..932e853a8933d --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-01-19-04-52.bpo-40417.Sti2lJ.rst @@ -0,0 +1 @@ +Fix imp module deprecation warning when PyImport_ReloadModule is called. Patch by Robert Rouhani. diff --git a/Python/import.c b/Python/import.c index edc59249622b1..6d014cf5b008f 100644 --- a/Python/import.c +++ b/Python/import.c @@ -1858,23 +1858,23 @@ PyImport_ImportModuleLevel(const char *name, PyObject *globals, PyObject *locals PyObject * PyImport_ReloadModule(PyObject *m) { - _Py_IDENTIFIER(imp); + _Py_IDENTIFIER(importlib); _Py_IDENTIFIER(reload); PyObject *reloaded_module = NULL; - PyObject *imp = _PyImport_GetModuleId(&PyId_imp); - if (imp == NULL) { + PyObject *importlib = _PyImport_GetModuleId(&PyId_importlib); + if (importlib == NULL) { if (PyErr_Occurred()) { return NULL; } - imp = PyImport_ImportModule("imp"); - if (imp == NULL) { + importlib = PyImport_ImportModule("importlib"); + if (importlib == NULL) { return NULL; } } - reloaded_module = _PyObject_CallMethodIdObjArgs(imp, &PyId_reload, m, NULL); - Py_DECREF(imp); + reloaded_module = _PyObject_CallMethodIdObjArgs(importlib, &PyId_reload, m, NULL); + Py_DECREF(importlib); return reloaded_module; } From webhook-mailer at python.org Tue May 5 22:28:32 2020 From: webhook-mailer at python.org 
(Tim Peters) Date: Wed, 06 May 2020 02:28:32 -0000 Subject: [Python-checkins] bpo-40480 "fnmatch" exponential execution time (GH-19908) Message-ID: https://github.com/python/cpython/commit/b9c46a2c2d7fc68457bff641f78932d66f5e5f59 commit: b9c46a2c2d7fc68457bff641f78932d66f5e5f59 branch: master author: Tim Peters committer: GitHub date: 2020-05-05T21:28:24-05:00 summary: bpo-40480 "fnmatch" exponential execution time (GH-19908) bpo-40480: create different regexps in the presence of multiple `*` patterns to prevent fnmatch() from taking exponential time. files: A Misc/NEWS.d/next/Library/2020-05-04-21-21-43.bpo-40480.mjldWa.rst M Lib/fnmatch.py M Lib/test/test_fnmatch.py diff --git a/Lib/fnmatch.py b/Lib/fnmatch.py index b98e6413295e1..d7d915d51314d 100644 --- a/Lib/fnmatch.py +++ b/Lib/fnmatch.py @@ -77,15 +77,19 @@ def translate(pat): There is no way to quote meta-characters. """ + STAR = object() + res = [] + add = res.append i, n = 0, len(pat) - res = '' while i < n: c = pat[i] i = i+1 if c == '*': - res = res + '.*' + # compress consecutive `*` into one + if (not res) or res[-1] is not STAR: + add(STAR) elif c == '?': - res = res + '.' + add('.') elif c == '[': j = i if j < n and pat[j] == '!': @@ -95,7 +99,7 @@ def translate(pat): while j < n and pat[j] != ']': j = j+1 if j >= n: - res = res + '\\[' + add('\\[') else: stuff = pat[i:j] if '--' not in stuff: @@ -122,7 +126,49 @@ def translate(pat): stuff = '^' + stuff[1:] elif stuff[0] in ('^', '['): stuff = '\\' + stuff - res = '%s[%s]' % (res, stuff) + add(f'[{stuff}]') else: - res = res + re.escape(c) - return r'(?s:%s)\Z' % res + add(re.escape(c)) + assert i == n + + # Deal with STARs. + inp = res + res = [] + add = res.append + i, n = 0, len(inp) + # Fixed pieces at the start? + while i < n and inp[i] is not STAR: + add(inp[i]) + i += 1 + # Now deal with STAR fixed STAR fixed ... + # For an interior `STAR fixed` pairing, we want to do a minimal + # .*? 
match followed by `fixed`, with no possibility of backtracking. + # We can't spell that directly, but can trick it into working by matching + # .*?fixed + # in a lookahead assertion, save the matched part in a group, then + # consume that group via a backreference. If the overall match fails, + # the lookahead assertion won't try alternatives. So the translation is: + # (?=(?P<name>.*?fixed))(?P=name) + # Group names are created as needed: g1, g2, g3, ... + groupnum = 0 + while i < n: + assert inp[i] is STAR + i += 1 + if i == n: + add(".*") + break + assert inp[i] is not STAR + fixed = [] + while i < n and inp[i] is not STAR: + fixed.append(inp[i]) + i += 1 + fixed = "".join(fixed) + if i == n: + add(".*") + add(fixed) + else: + groupnum += 1 + add(f"(?=(?P<g{groupnum}>.*?{fixed}))(?P=g{groupnum})") + assert i == n + res = "".join(res) + return fr'(?s:{res})\Z' diff --git a/Lib/test/test_fnmatch.py b/Lib/test/test_fnmatch.py index 55f9f0d3a5425..4c173069503cc 100644 --- a/Lib/test/test_fnmatch.py +++ b/Lib/test/test_fnmatch.py @@ -45,6 +45,13 @@ def test_fnmatch(self): check('\nfoo', 'foo*', False) check('\n', '*') + def test_slow_fnmatch(self): + check = self.check_match + check('a' * 50, '*a*a*a*a*a*a*a*a*a*a') + # The next "takes forever" if the regexp translation is + # straightforward. See bpo-40480. 
+ check('a' * 50 + 'b', '*a*a*a*a*a*a*a*a*a*a', False) + def test_mix_bytes_str(self): self.assertRaises(TypeError, fnmatch, 'test', b'*') self.assertRaises(TypeError, fnmatch, b'test', '*') @@ -107,6 +114,16 @@ def test_translate(self): self.assertEqual(translate('[!x]'), r'(?s:[^x])\Z') self.assertEqual(translate('[^x]'), r'(?s:[\^x])\Z') self.assertEqual(translate('[x'), r'(?s:\[x)\Z') + # from the docs + self.assertEqual(translate('*.txt'), r'(?s:.*\.txt)\Z') + # squash consecutive stars + self.assertEqual(translate('*********'), r'(?s:.*)\Z') + self.assertEqual(translate('A*********'), r'(?s:A.*)\Z') + self.assertEqual(translate('*********A'), r'(?s:.*A)\Z') + self.assertEqual(translate('A*********?[?]?'), r'(?s:A.*.[?].)\Z') + # fancy translation to prevent exponential-time match failure + self.assertEqual(translate('**a*a****a'), + r'(?s:(?=(?P<g1>.*?a))(?P=g1)(?=(?P<g2>.*?a))(?P=g2).*a)\Z') class FilterTestCase(unittest.TestCase): diff --git a/Misc/NEWS.d/next/Library/2020-05-04-21-21-43.bpo-40480.mjldWa.rst b/Misc/NEWS.d/next/Library/2020-05-04-21-21-43.bpo-40480.mjldWa.rst new file mode 100644 index 0000000000000..d046b1422419d --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-04-21-21-43.bpo-40480.mjldWa.rst @@ -0,0 +1 @@ +``fnmatch.fnmatch()`` could take exponential time in the presence of multiple ``*`` pattern characters. This was repaired by generating more elaborate regular expressions to avoid futile backtracking. 
\ No newline at end of file From webhook-mailer at python.org Wed May 6 01:24:49 2020 From: webhook-mailer at python.org (Batuhan Taskaya) Date: Wed, 06 May 2020 05:24:49 -0000 Subject: [Python-checkins] bpo-40517: Implement syntax highlighting support for ASDL (#19928) Message-ID: https://github.com/python/cpython/commit/d60040ba226bd2e3b6f58d074015aa2499dc1cb8 commit: d60040ba226bd2e3b6f58d074015aa2499dc1cb8 branch: master author: Batuhan Taskaya committer: GitHub date: 2020-05-05T22:24:39-07:00 summary: bpo-40517: Implement syntax highlighting support for ASDL (#19928) files: A Doc/tools/extensions/asdl_highlight.py M Doc/conf.py M Doc/library/ast.rst diff --git a/Doc/conf.py b/Doc/conf.py index 32db34344a70a..12d74ea24ce4a 100644 --- a/Doc/conf.py +++ b/Doc/conf.py @@ -14,7 +14,8 @@ # --------------------- extensions = ['sphinx.ext.coverage', 'sphinx.ext.doctest', - 'pyspecific', 'c_annotations', 'escape4chm'] + 'pyspecific', 'c_annotations', 'escape4chm', + 'asdl_highlight'] doctest_global_setup = ''' diff --git a/Doc/library/ast.rst b/Doc/library/ast.rst index fc04114949c0c..6c6ad01b842c8 100644 --- a/Doc/library/ast.rst +++ b/Doc/library/ast.rst @@ -35,7 +35,7 @@ Abstract Grammar The abstract grammar is currently defined as follows: .. 
literalinclude:: ../../Parser/Python.asdl - :language: none + :language: asdl Node classes diff --git a/Doc/tools/extensions/asdl_highlight.py b/Doc/tools/extensions/asdl_highlight.py new file mode 100644 index 0000000000000..9b003e9905c56 --- /dev/null +++ b/Doc/tools/extensions/asdl_highlight.py @@ -0,0 +1,51 @@ +import os +import sys +sys.path.append(os.path.abspath("../Parser/")) + +from pygments.lexer import RegexLexer, bygroups, include, words +from pygments.token import (Comment, Generic, Keyword, Name, Operator, + Punctuation, Text) + +from asdl import builtin_types +from sphinx.highlighting import lexers + +class ASDLLexer(RegexLexer): + name = "ASDL" + aliases = ["asdl"] + filenames = ["*.asdl"] + _name = r"([^\W\d]\w*)" + _text_ws = r"(\s*)" + + tokens = { + "ws": [ + (r"\n", Text), + (r"\s+", Text), + (r"--.*?$", Comment.Singleline), + ], + "root": [ + include("ws"), + ( + r"(module)" + _text_ws + _name, + bygroups(Keyword, Text, Name.Class), + ), + ( + r"(\w+)(\*\s|\?\s|\s)(\w+)", + bygroups(Name.Variable, Generic.Strong, Name.Tag), + ), + (words(builtin_types), Keyword.Type), + (r"attributes", Name.Builtin), + ( + _name + _text_ws + "(=)", + bygroups(Name.Variable, Text, Operator), + ), + (_name, Name.Function), + (r"\|", Operator), + (r"{|}|\(|\)", Punctuation), + (r".", Text), + ], + } + + +def setup(app): + lexers["asdl"] = ASDLLexer() + return {'version': '1.0', 'parallel_read_safe': True} From webhook-mailer at python.org Wed May 6 01:34:03 2020 From: webhook-mailer at python.org (Raymond Hettinger) Date: Wed, 06 May 2020 05:34:03 -0000 Subject: [Python-checkins] Revert "bpo-40517: Implement syntax highlighting support for ASDL (#19928)" (#19950) Message-ID: https://github.com/python/cpython/commit/eff870b618ca6f6b7a60a271f15af7e54b8a1b97 commit: eff870b618ca6f6b7a60a271f15af7e54b8a1b97 branch: master author: Raymond Hettinger committer: GitHub date: 2020-05-05T22:33:55-07:00 summary: Revert "bpo-40517: Implement syntax highlighting support for 
ASDL (#19928)" (#19950) This reverts commit d60040ba226bd2e3b6f58d074015aa2499dc1cb8. files: D Doc/tools/extensions/asdl_highlight.py M Doc/conf.py M Doc/library/ast.rst diff --git a/Doc/conf.py b/Doc/conf.py index 12d74ea24ce4a..32db34344a70a 100644 --- a/Doc/conf.py +++ b/Doc/conf.py @@ -14,8 +14,7 @@ # --------------------- extensions = ['sphinx.ext.coverage', 'sphinx.ext.doctest', - 'pyspecific', 'c_annotations', 'escape4chm', - 'asdl_highlight'] + 'pyspecific', 'c_annotations', 'escape4chm'] doctest_global_setup = ''' diff --git a/Doc/library/ast.rst b/Doc/library/ast.rst index 6c6ad01b842c8..fc04114949c0c 100644 --- a/Doc/library/ast.rst +++ b/Doc/library/ast.rst @@ -35,7 +35,7 @@ Abstract Grammar The abstract grammar is currently defined as follows: .. literalinclude:: ../../Parser/Python.asdl - :language: asdl + :language: none Node classes diff --git a/Doc/tools/extensions/asdl_highlight.py b/Doc/tools/extensions/asdl_highlight.py deleted file mode 100644 index 9b003e9905c56..0000000000000 --- a/Doc/tools/extensions/asdl_highlight.py +++ /dev/null @@ -1,51 +0,0 @@ -import os -import sys -sys.path.append(os.path.abspath("../Parser/")) - -from pygments.lexer import RegexLexer, bygroups, include, words -from pygments.token import (Comment, Generic, Keyword, Name, Operator, - Punctuation, Text) - -from asdl import builtin_types -from sphinx.highlighting import lexers - -class ASDLLexer(RegexLexer): - name = "ASDL" - aliases = ["asdl"] - filenames = ["*.asdl"] - _name = r"([^\W\d]\w*)" - _text_ws = r"(\s*)" - - tokens = { - "ws": [ - (r"\n", Text), - (r"\s+", Text), - (r"--.*?$", Comment.Singleline), - ], - "root": [ - include("ws"), - ( - r"(module)" + _text_ws + _name, - bygroups(Keyword, Text, Name.Class), - ), - ( - r"(\w+)(\*\s|\?\s|\s)(\w+)", - bygroups(Name.Variable, Generic.Strong, Name.Tag), - ), - (words(builtin_types), Keyword.Type), - (r"attributes", Name.Builtin), - ( - _name + _text_ws + "(=)", - bygroups(Name.Variable, Text, Operator), - ), - 
(_name, Name.Function), - (r"\|", Operator), - (r"{|}|\(|\)", Punctuation), - (r".", Text), - ], - } - - -def setup(app): - lexers["asdl"] = ASDLLexer() - return {'version': '1.0', 'parallel_read_safe': True} From webhook-mailer at python.org Wed May 6 09:22:22 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 06 May 2020 13:22:22 -0000 Subject: [Python-checkins] bpo-40527: Fix command line argument parsing (GH-19955) Message-ID: https://github.com/python/cpython/commit/2668a9a5aa506a048aef7b4881c8dcf6b81c6870 commit: 2668a9a5aa506a048aef7b4881c8dcf6b81c6870 branch: master author: Victor Stinner committer: GitHub date: 2020-05-06T22:22:17+09:00 summary: bpo-40527: Fix command line argument parsing (GH-19955) files: A Misc/NEWS.d/next/Core and Builtins/2020-05-06-14-52-35.bpo-40527.gTNKuy.rst M Lib/test/test_cmd_line.py M Python/getopt.c diff --git a/Lib/test/test_cmd_line.py b/Lib/test/test_cmd_line.py index ee96473322dba..724402533038d 100644 --- a/Lib/test/test_cmd_line.py +++ b/Lib/test/test_cmd_line.py @@ -756,6 +756,17 @@ def test_argv0_normalization(self): self.assertEqual(proc.returncode, 0, proc) self.assertEqual(proc.stdout.strip(), b'0') + def test_parsing_error(self): + args = [sys.executable, '-I', '--unknown-option'] + proc = subprocess.run(args, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True) + err_msg = "unknown option --unknown-option\nusage: " + self.assertTrue(proc.stderr.startswith(err_msg), proc.stderr) + self.assertNotEqual(proc.returncode, 0) + + @unittest.skipIf(interpreter_requires_environment(), 'Cannot run -I tests when PYTHON env vars are required.') class IgnoreEnvironmentTest(unittest.TestCase): diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-06-14-52-35.bpo-40527.gTNKuy.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-06-14-52-35.bpo-40527.gTNKuy.rst new file mode 100644 index 0000000000000..19b8888230c65 --- /dev/null +++ b/Misc/NEWS.d/next/Core and 
Builtins/2020-05-06-14-52-35.bpo-40527.gTNKuy.rst @@ -0,0 +1,2 @@ +Fix command line argument parsing: no longer write errors multiple times +into stderr. diff --git a/Python/getopt.c b/Python/getopt.c index 708d9ce496287..2e3891aae2d16 100644 --- a/Python/getopt.c +++ b/Python/getopt.c @@ -101,7 +101,9 @@ int _PyOS_GetOpt(Py_ssize_t argc, wchar_t * const *argv, int *longindex) if (option == L'-') { // Parse long option. if (*opt_ptr == L'\0') { - fprintf(stderr, "expected long option\n"); + if (_PyOS_opterr) { + fprintf(stderr, "expected long option\n"); + } return -1; } *longindex = 0; @@ -111,7 +113,9 @@ int _PyOS_GetOpt(Py_ssize_t argc, wchar_t * const *argv, int *longindex) break; } if (!opt->name) { - fprintf(stderr, "unknown option %ls\n", argv[_PyOS_optind - 1]); + if (_PyOS_opterr) { + fprintf(stderr, "unknown option %ls\n", argv[_PyOS_optind - 1]); + } return '_'; } opt_ptr = L""; @@ -119,8 +123,10 @@ int _PyOS_GetOpt(Py_ssize_t argc, wchar_t * const *argv, int *longindex) return opt->val; } if (_PyOS_optind >= argc) { - fprintf(stderr, "Argument expected for the %ls options\n", - argv[_PyOS_optind - 1]); + if (_PyOS_opterr) { + fprintf(stderr, "Argument expected for the %ls options\n", + argv[_PyOS_optind - 1]); + } return '_'; } _PyOS_optarg = argv[_PyOS_optind++]; @@ -128,14 +134,16 @@ int _PyOS_GetOpt(Py_ssize_t argc, wchar_t * const *argv, int *longindex) } if (option == 'J') { - if (_PyOS_opterr) + if (_PyOS_opterr) { fprintf(stderr, "-J is reserved for Jython\n"); + } return '_'; } if ((ptr = wcschr(SHORT_OPTS, option)) == NULL) { - if (_PyOS_opterr) + if (_PyOS_opterr) { fprintf(stderr, "Unknown option: -%c\n", (char)option); + } return '_'; } @@ -147,9 +155,10 @@ int _PyOS_GetOpt(Py_ssize_t argc, wchar_t * const *argv, int *longindex) else { if (_PyOS_optind >= argc) { - if (_PyOS_opterr) + if (_PyOS_opterr) { fprintf(stderr, "Argument expected for the -%c option\n", (char)option); + } return '_'; } From webhook-mailer at python.org Wed May 6 
09:43:14 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Wed, 06 May 2020 13:43:14 -0000 Subject: [Python-checkins] bpo-40527: Fix command line argument parsing (GH-19955) Message-ID: https://github.com/python/cpython/commit/bce4ddafdd188cc6deb1584728b67b9e149ca6a4 commit: bce4ddafdd188cc6deb1584728b67b9e149ca6a4 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-06T06:43:09-07:00 summary: bpo-40527: Fix command line argument parsing (GH-19955) (cherry picked from commit 2668a9a5aa506a048aef7b4881c8dcf6b81c6870) Co-authored-by: Victor Stinner files: A Misc/NEWS.d/next/Core and Builtins/2020-05-06-14-52-35.bpo-40527.gTNKuy.rst M Lib/test/test_cmd_line.py M Python/getopt.c diff --git a/Lib/test/test_cmd_line.py b/Lib/test/test_cmd_line.py index 497bfa9eb89de..5fc5bff6603e2 100644 --- a/Lib/test/test_cmd_line.py +++ b/Lib/test/test_cmd_line.py @@ -740,6 +740,17 @@ def test_argv0_normalization(self): self.assertEqual(proc.returncode, 0, proc) self.assertEqual(proc.stdout.strip(), b'0') + def test_parsing_error(self): + args = [sys.executable, '-I', '--unknown-option'] + proc = subprocess.run(args, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True) + err_msg = "unknown option --unknown-option\nusage: " + self.assertTrue(proc.stderr.startswith(err_msg), proc.stderr) + self.assertNotEqual(proc.returncode, 0) + + @unittest.skipIf(interpreter_requires_environment(), 'Cannot run -I tests when PYTHON env vars are required.') class IgnoreEnvironmentTest(unittest.TestCase): diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-06-14-52-35.bpo-40527.gTNKuy.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-06-14-52-35.bpo-40527.gTNKuy.rst new file mode 100644 index 0000000000000..19b8888230c65 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-06-14-52-35.bpo-40527.gTNKuy.rst @@ -0,0 +1,2 @@ +Fix command line argument parsing: no longer write errors multiple 
times +into stderr. diff --git a/Python/getopt.c b/Python/getopt.c index 1a7db3fce888e..249ad1e873607 100644 --- a/Python/getopt.c +++ b/Python/getopt.c @@ -105,7 +105,9 @@ int _PyOS_GetOpt(Py_ssize_t argc, wchar_t * const *argv, int *longindex) if (option == L'-') { // Parse long option. if (*opt_ptr == L'\0') { - fprintf(stderr, "expected long option\n"); + if (_PyOS_opterr) { + fprintf(stderr, "expected long option\n"); + } return -1; } *longindex = 0; @@ -115,7 +117,9 @@ int _PyOS_GetOpt(Py_ssize_t argc, wchar_t * const *argv, int *longindex) break; } if (!opt->name) { - fprintf(stderr, "unknown option %ls\n", argv[_PyOS_optind - 1]); + if (_PyOS_opterr) { + fprintf(stderr, "unknown option %ls\n", argv[_PyOS_optind - 1]); + } return '_'; } opt_ptr = L""; @@ -123,8 +127,10 @@ int _PyOS_GetOpt(Py_ssize_t argc, wchar_t * const *argv, int *longindex) return opt->val; } if (_PyOS_optind >= argc) { - fprintf(stderr, "Argument expected for the %ls options\n", - argv[_PyOS_optind - 1]); + if (_PyOS_opterr) { + fprintf(stderr, "Argument expected for the %ls options\n", + argv[_PyOS_optind - 1]); + } return '_'; } _PyOS_optarg = argv[_PyOS_optind++]; @@ -132,14 +138,16 @@ int _PyOS_GetOpt(Py_ssize_t argc, wchar_t * const *argv, int *longindex) } if (option == 'J') { - if (_PyOS_opterr) + if (_PyOS_opterr) { fprintf(stderr, "-J is reserved for Jython\n"); + } return '_'; } if ((ptr = wcschr(SHORT_OPTS, option)) == NULL) { - if (_PyOS_opterr) + if (_PyOS_opterr) { fprintf(stderr, "Unknown option: -%c\n", (char)option); + } return '_'; } @@ -151,9 +159,10 @@ int _PyOS_GetOpt(Py_ssize_t argc, wchar_t * const *argv, int *longindex) else { if (_PyOS_optind >= argc) { - if (_PyOS_opterr) + if (_PyOS_opterr) { fprintf(stderr, "Argument expected for the -%c option\n", (char)option); + } return '_'; } From webhook-mailer at python.org Wed May 6 10:29:41 2020 From: webhook-mailer at python.org (Batuhan Taskaya) Date: Wed, 06 May 2020 14:29:41 -0000 Subject: [Python-checkins] 
bpo-40528: Improve and clear several aspects of the ASDL definition code for the AST (GH-19952) Message-ID: https://github.com/python/cpython/commit/091951a67c832db83c60f4eb22f1fb474b70e635 commit: 091951a67c832db83c60f4eb22f1fb474b70e635 branch: master author: Batuhan Taskaya committer: GitHub date: 2020-05-06T15:29:32+01:00 summary: bpo-40528: Improve and clear several aspects of the ASDL definition code for the AST (GH-19952) files: M Include/asdl.h M Lib/test/test_ast.py M Parser/Python.asdl M Parser/asdl.py M Parser/asdl_c.py M Python/Python-ast.c diff --git a/Include/asdl.h b/Include/asdl.h index 549df2ace7555..e962560bcd4cb 100644 --- a/Include/asdl.h +++ b/Include/asdl.h @@ -4,9 +4,7 @@ typedef PyObject * identifier; typedef PyObject * string; -typedef PyObject * bytes; typedef PyObject * object; -typedef PyObject * singleton; typedef PyObject * constant; /* It would be nice if the code generated by asdl_c.py was completely diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py index a8a13fdcd7426..6b71adac4e4a6 100644 --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast.py @@ -597,7 +597,7 @@ def test_empty_yield_from(self): empty_yield_from.body[0].body[0].value.value = None with self.assertRaises(ValueError) as cm: compile(empty_yield_from, "", "exec") - self.assertIn("field value is required", str(cm.exception)) + self.assertIn("field 'value' is required", str(cm.exception)) @support.cpython_only def test_issue31592(self): diff --git a/Parser/Python.asdl b/Parser/Python.asdl index f789f1da456e9..889712b4b3d36 100644 --- a/Parser/Python.asdl +++ b/Parser/Python.asdl @@ -1,5 +1,5 @@ --- ASDL's 5 builtin types are: --- identifier, int, string, object, constant +-- ASDL's 4 builtin types are: +-- identifier, int, string, constant module Python { diff --git a/Parser/asdl.py b/Parser/asdl.py index 5416377100c64..7f509488b96ed 100644 --- a/Parser/asdl.py +++ b/Parser/asdl.py @@ -33,8 +33,7 @@ # See the EBNF at the top of the file to understand the logical 
connection # between the various node types. -builtin_types = {'identifier', 'string', 'bytes', 'int', 'object', 'singleton', - 'constant'} +builtin_types = {'identifier', 'string', 'int', 'constant'} class AST: def __repr__(self): diff --git a/Parser/asdl_c.py b/Parser/asdl_c.py index c98f949042f30..59bf03ef8df3d 100755 --- a/Parser/asdl_c.py +++ b/Parser/asdl_c.py @@ -323,7 +323,7 @@ def emit(s, depth=0, reflow=True): if not opt and argtype != "int": emit("if (!%s) {" % argname, 1) emit("PyErr_SetString(PyExc_ValueError,", 2) - msg = "field %s is required for %s" % (argname, name) + msg = "field '%s' is required for %s" % (argname, name) emit(' "%s");' % msg, 2, reflow=False) emit('return NULL;', 2) @@ -853,11 +853,9 @@ def visitModule(self, mod): Py_INCREF((PyObject*)o); return (PyObject*)o; } -#define ast2obj_singleton ast2obj_object #define ast2obj_constant ast2obj_object #define ast2obj_identifier ast2obj_object #define ast2obj_string ast2obj_object -#define ast2obj_bytes ast2obj_object static PyObject* ast2obj_int(long b) { @@ -1147,12 +1145,8 @@ def simpleSum(self, sum, name): self.emit("case %s:" % t.name, 2) self.emit("Py_INCREF(astmodulestate_global->%s_singleton);" % t.name, 3) self.emit("return astmodulestate_global->%s_singleton;" % t.name, 3) - self.emit("default:", 2) - self.emit('/* should never happen, but just in case ... 
*/', 3) - code = "PyErr_Format(PyExc_SystemError, \"unknown %s found\");" % name - self.emit(code, 3, reflow=False) - self.emit("return NULL;", 3) self.emit("}", 1) + self.emit("Py_UNREACHABLE();", 1); self.emit("}", 0) def visitProduct(self, prod, name): diff --git a/Python/Python-ast.c b/Python/Python-ast.c index 80f91646fd62e..f34b1450c66ef 100644 --- a/Python/Python-ast.c +++ b/Python/Python-ast.c @@ -1294,11 +1294,9 @@ static PyObject* ast2obj_object(void *o) Py_INCREF((PyObject*)o); return (PyObject*)o; } -#define ast2obj_singleton ast2obj_object #define ast2obj_constant ast2obj_object #define ast2obj_identifier ast2obj_object #define ast2obj_string ast2obj_object -#define ast2obj_bytes ast2obj_object static PyObject* ast2obj_int(long b) { @@ -2077,7 +2075,7 @@ Expression(expr_ty body, PyArena *arena) mod_ty p; if (!body) { PyErr_SetString(PyExc_ValueError, - "field body is required for Expression"); + "field 'body' is required for Expression"); return NULL; } p = (mod_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2094,7 +2092,7 @@ FunctionType(asdl_seq * argtypes, expr_ty returns, PyArena *arena) mod_ty p; if (!returns) { PyErr_SetString(PyExc_ValueError, - "field returns is required for FunctionType"); + "field 'returns' is required for FunctionType"); return NULL; } p = (mod_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2114,12 +2112,12 @@ FunctionDef(identifier name, arguments_ty args, asdl_seq * body, asdl_seq * stmt_ty p; if (!name) { PyErr_SetString(PyExc_ValueError, - "field name is required for FunctionDef"); + "field 'name' is required for FunctionDef"); return NULL; } if (!args) { PyErr_SetString(PyExc_ValueError, - "field args is required for FunctionDef"); + "field 'args' is required for FunctionDef"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2148,12 +2146,12 @@ AsyncFunctionDef(identifier name, arguments_ty args, asdl_seq * body, asdl_seq stmt_ty p; if (!name) { PyErr_SetString(PyExc_ValueError, - "field name is required for 
AsyncFunctionDef"); + "field 'name' is required for AsyncFunctionDef"); return NULL; } if (!args) { PyErr_SetString(PyExc_ValueError, - "field args is required for AsyncFunctionDef"); + "field 'args' is required for AsyncFunctionDef"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2181,7 +2179,7 @@ ClassDef(identifier name, asdl_seq * bases, asdl_seq * keywords, asdl_seq * stmt_ty p; if (!name) { PyErr_SetString(PyExc_ValueError, - "field name is required for ClassDef"); + "field 'name' is required for ClassDef"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2241,7 +2239,7 @@ Assign(asdl_seq * targets, expr_ty value, string type_comment, int lineno, int stmt_ty p; if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for Assign"); + "field 'value' is required for Assign"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2265,17 +2263,17 @@ AugAssign(expr_ty target, operator_ty op, expr_ty value, int lineno, int stmt_ty p; if (!target) { PyErr_SetString(PyExc_ValueError, - "field target is required for AugAssign"); + "field 'target' is required for AugAssign"); return NULL; } if (!op) { PyErr_SetString(PyExc_ValueError, - "field op is required for AugAssign"); + "field 'op' is required for AugAssign"); return NULL; } if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for AugAssign"); + "field 'value' is required for AugAssign"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2300,12 +2298,12 @@ AnnAssign(expr_ty target, expr_ty annotation, expr_ty value, int simple, int stmt_ty p; if (!target) { PyErr_SetString(PyExc_ValueError, - "field target is required for AnnAssign"); + "field 'target' is required for AnnAssign"); return NULL; } if (!annotation) { PyErr_SetString(PyExc_ValueError, - "field annotation is required for AnnAssign"); + "field 'annotation' is required for AnnAssign"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, 
sizeof(*p)); @@ -2331,12 +2329,12 @@ For(expr_ty target, expr_ty iter, asdl_seq * body, asdl_seq * orelse, string stmt_ty p; if (!target) { PyErr_SetString(PyExc_ValueError, - "field target is required for For"); + "field 'target' is required for For"); return NULL; } if (!iter) { PyErr_SetString(PyExc_ValueError, - "field iter is required for For"); + "field 'iter' is required for For"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2363,12 +2361,12 @@ AsyncFor(expr_ty target, expr_ty iter, asdl_seq * body, asdl_seq * orelse, stmt_ty p; if (!target) { PyErr_SetString(PyExc_ValueError, - "field target is required for AsyncFor"); + "field 'target' is required for AsyncFor"); return NULL; } if (!iter) { PyErr_SetString(PyExc_ValueError, - "field iter is required for AsyncFor"); + "field 'iter' is required for AsyncFor"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2394,7 +2392,7 @@ While(expr_ty test, asdl_seq * body, asdl_seq * orelse, int lineno, int stmt_ty p; if (!test) { PyErr_SetString(PyExc_ValueError, - "field test is required for While"); + "field 'test' is required for While"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2418,7 +2416,7 @@ If(expr_ty test, asdl_seq * body, asdl_seq * orelse, int lineno, int stmt_ty p; if (!test) { PyErr_SetString(PyExc_ValueError, - "field test is required for If"); + "field 'test' is required for If"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2519,7 +2517,7 @@ Assert(expr_ty test, expr_ty msg, int lineno, int col_offset, int end_lineno, stmt_ty p; if (!test) { PyErr_SetString(PyExc_ValueError, - "field test is required for Assert"); + "field 'test' is required for Assert"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2612,7 +2610,7 @@ Expr(expr_ty value, int lineno, int col_offset, int end_lineno, int stmt_ty p; if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for Expr"); + "field 
'value' is required for Expr"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2682,7 +2680,7 @@ BoolOp(boolop_ty op, asdl_seq * values, int lineno, int col_offset, int expr_ty p; if (!op) { PyErr_SetString(PyExc_ValueError, - "field op is required for BoolOp"); + "field 'op' is required for BoolOp"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2705,12 +2703,12 @@ NamedExpr(expr_ty target, expr_ty value, int lineno, int col_offset, int expr_ty p; if (!target) { PyErr_SetString(PyExc_ValueError, - "field target is required for NamedExpr"); + "field 'target' is required for NamedExpr"); return NULL; } if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for NamedExpr"); + "field 'value' is required for NamedExpr"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2733,17 +2731,17 @@ BinOp(expr_ty left, operator_ty op, expr_ty right, int lineno, int col_offset, expr_ty p; if (!left) { PyErr_SetString(PyExc_ValueError, - "field left is required for BinOp"); + "field 'left' is required for BinOp"); return NULL; } if (!op) { PyErr_SetString(PyExc_ValueError, - "field op is required for BinOp"); + "field 'op' is required for BinOp"); return NULL; } if (!right) { PyErr_SetString(PyExc_ValueError, - "field right is required for BinOp"); + "field 'right' is required for BinOp"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2767,12 +2765,12 @@ UnaryOp(unaryop_ty op, expr_ty operand, int lineno, int col_offset, int expr_ty p; if (!op) { PyErr_SetString(PyExc_ValueError, - "field op is required for UnaryOp"); + "field 'op' is required for UnaryOp"); return NULL; } if (!operand) { PyErr_SetString(PyExc_ValueError, - "field operand is required for UnaryOp"); + "field 'operand' is required for UnaryOp"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2795,12 +2793,12 @@ Lambda(arguments_ty args, expr_ty body, int lineno, int col_offset, int expr_ty p; if 
(!args) { PyErr_SetString(PyExc_ValueError, - "field args is required for Lambda"); + "field 'args' is required for Lambda"); return NULL; } if (!body) { PyErr_SetString(PyExc_ValueError, - "field body is required for Lambda"); + "field 'body' is required for Lambda"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2823,17 +2821,17 @@ IfExp(expr_ty test, expr_ty body, expr_ty orelse, int lineno, int col_offset, expr_ty p; if (!test) { PyErr_SetString(PyExc_ValueError, - "field test is required for IfExp"); + "field 'test' is required for IfExp"); return NULL; } if (!body) { PyErr_SetString(PyExc_ValueError, - "field body is required for IfExp"); + "field 'body' is required for IfExp"); return NULL; } if (!orelse) { PyErr_SetString(PyExc_ValueError, - "field orelse is required for IfExp"); + "field 'orelse' is required for IfExp"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2892,7 +2890,7 @@ ListComp(expr_ty elt, asdl_seq * generators, int lineno, int col_offset, int expr_ty p; if (!elt) { PyErr_SetString(PyExc_ValueError, - "field elt is required for ListComp"); + "field 'elt' is required for ListComp"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2915,7 +2913,7 @@ SetComp(expr_ty elt, asdl_seq * generators, int lineno, int col_offset, int expr_ty p; if (!elt) { PyErr_SetString(PyExc_ValueError, - "field elt is required for SetComp"); + "field 'elt' is required for SetComp"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2938,12 +2936,12 @@ DictComp(expr_ty key, expr_ty value, asdl_seq * generators, int lineno, int expr_ty p; if (!key) { PyErr_SetString(PyExc_ValueError, - "field key is required for DictComp"); + "field 'key' is required for DictComp"); return NULL; } if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for DictComp"); + "field 'value' is required for DictComp"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2967,7 
+2965,7 @@ GeneratorExp(expr_ty elt, asdl_seq * generators, int lineno, int col_offset, expr_ty p; if (!elt) { PyErr_SetString(PyExc_ValueError, - "field elt is required for GeneratorExp"); + "field 'elt' is required for GeneratorExp"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2990,7 +2988,7 @@ Await(expr_ty value, int lineno, int col_offset, int end_lineno, int expr_ty p; if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for Await"); + "field 'value' is required for Await"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3029,7 +3027,7 @@ YieldFrom(expr_ty value, int lineno, int col_offset, int end_lineno, int expr_ty p; if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for YieldFrom"); + "field 'value' is required for YieldFrom"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3051,7 +3049,7 @@ Compare(expr_ty left, asdl_int_seq * ops, asdl_seq * comparators, int lineno, expr_ty p; if (!left) { PyErr_SetString(PyExc_ValueError, - "field left is required for Compare"); + "field 'left' is required for Compare"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3075,7 +3073,7 @@ Call(expr_ty func, asdl_seq * args, asdl_seq * keywords, int lineno, int expr_ty p; if (!func) { PyErr_SetString(PyExc_ValueError, - "field func is required for Call"); + "field 'func' is required for Call"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3100,7 +3098,7 @@ FormattedValue(expr_ty value, int conversion, expr_ty format_spec, int lineno, expr_ty p; if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for FormattedValue"); + "field 'value' is required for FormattedValue"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3141,7 +3139,7 @@ Constant(constant value, string kind, int lineno, int col_offset, int expr_ty p; if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is 
required for Constant"); + "field 'value' is required for Constant"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3164,17 +3162,17 @@ Attribute(expr_ty value, identifier attr, expr_context_ty ctx, int lineno, int expr_ty p; if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for Attribute"); + "field 'value' is required for Attribute"); return NULL; } if (!attr) { PyErr_SetString(PyExc_ValueError, - "field attr is required for Attribute"); + "field 'attr' is required for Attribute"); return NULL; } if (!ctx) { PyErr_SetString(PyExc_ValueError, - "field ctx is required for Attribute"); + "field 'ctx' is required for Attribute"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3198,17 +3196,17 @@ Subscript(expr_ty value, expr_ty slice, expr_context_ty ctx, int lineno, int expr_ty p; if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for Subscript"); + "field 'value' is required for Subscript"); return NULL; } if (!slice) { PyErr_SetString(PyExc_ValueError, - "field slice is required for Subscript"); + "field 'slice' is required for Subscript"); return NULL; } if (!ctx) { PyErr_SetString(PyExc_ValueError, - "field ctx is required for Subscript"); + "field 'ctx' is required for Subscript"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3232,12 +3230,12 @@ Starred(expr_ty value, expr_context_ty ctx, int lineno, int col_offset, int expr_ty p; if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for Starred"); + "field 'value' is required for Starred"); return NULL; } if (!ctx) { PyErr_SetString(PyExc_ValueError, - "field ctx is required for Starred"); + "field 'ctx' is required for Starred"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3260,12 +3258,12 @@ Name(identifier id, expr_context_ty ctx, int lineno, int col_offset, int expr_ty p; if (!id) { PyErr_SetString(PyExc_ValueError, - "field id is required for Name"); + 
"field 'id' is required for Name"); return NULL; } if (!ctx) { PyErr_SetString(PyExc_ValueError, - "field ctx is required for Name"); + "field 'ctx' is required for Name"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3288,7 +3286,7 @@ List(asdl_seq * elts, expr_context_ty ctx, int lineno, int col_offset, int expr_ty p; if (!ctx) { PyErr_SetString(PyExc_ValueError, - "field ctx is required for List"); + "field 'ctx' is required for List"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3311,7 +3309,7 @@ Tuple(asdl_seq * elts, expr_context_ty ctx, int lineno, int col_offset, int expr_ty p; if (!ctx) { PyErr_SetString(PyExc_ValueError, - "field ctx is required for Tuple"); + "field 'ctx' is required for Tuple"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3353,12 +3351,12 @@ comprehension(expr_ty target, expr_ty iter, asdl_seq * ifs, int is_async, comprehension_ty p; if (!target) { PyErr_SetString(PyExc_ValueError, - "field target is required for comprehension"); + "field 'target' is required for comprehension"); return NULL; } if (!iter) { PyErr_SetString(PyExc_ValueError, - "field iter is required for comprehension"); + "field 'iter' is required for comprehension"); return NULL; } p = (comprehension_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3416,7 +3414,7 @@ arg(identifier arg, expr_ty annotation, string type_comment, int lineno, int arg_ty p; if (!arg) { PyErr_SetString(PyExc_ValueError, - "field arg is required for arg"); + "field 'arg' is required for arg"); return NULL; } p = (arg_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3439,7 +3437,7 @@ keyword(identifier arg, expr_ty value, int lineno, int col_offset, int keyword_ty p; if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for keyword"); + "field 'value' is required for keyword"); return NULL; } p = (keyword_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3460,7 +3458,7 @@ alias(identifier name, identifier asname, PyArena *arena) 
alias_ty p; if (!name) { PyErr_SetString(PyExc_ValueError, - "field name is required for alias"); + "field 'name' is required for alias"); return NULL; } p = (alias_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3477,7 +3475,7 @@ withitem(expr_ty context_expr, expr_ty optional_vars, PyArena *arena) withitem_ty p; if (!context_expr) { PyErr_SetString(PyExc_ValueError, - "field context_expr is required for withitem"); + "field 'context_expr' is required for withitem"); return NULL; } p = (withitem_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3494,7 +3492,7 @@ TypeIgnore(int lineno, string tag, PyArena *arena) type_ignore_ty p; if (!tag) { PyErr_SetString(PyExc_ValueError, - "field tag is required for TypeIgnore"); + "field 'tag' is required for TypeIgnore"); return NULL; } p = (type_ignore_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -4602,11 +4600,8 @@ PyObject* ast2obj_expr_context(expr_context_ty o) case Del: Py_INCREF(astmodulestate_global->Del_singleton); return astmodulestate_global->Del_singleton; - default: - /* should never happen, but just in case ... */ - PyErr_Format(PyExc_SystemError, "unknown expr_context found"); - return NULL; } + Py_UNREACHABLE(); } PyObject* ast2obj_boolop(boolop_ty o) { @@ -4617,11 +4612,8 @@ PyObject* ast2obj_boolop(boolop_ty o) case Or: Py_INCREF(astmodulestate_global->Or_singleton); return astmodulestate_global->Or_singleton; - default: - /* should never happen, but just in case ... */ - PyErr_Format(PyExc_SystemError, "unknown boolop found"); - return NULL; } + Py_UNREACHABLE(); } PyObject* ast2obj_operator(operator_ty o) { @@ -4665,11 +4657,8 @@ PyObject* ast2obj_operator(operator_ty o) case FloorDiv: Py_INCREF(astmodulestate_global->FloorDiv_singleton); return astmodulestate_global->FloorDiv_singleton; - default: - /* should never happen, but just in case ... 
*/ - PyErr_Format(PyExc_SystemError, "unknown operator found"); - return NULL; } + Py_UNREACHABLE(); } PyObject* ast2obj_unaryop(unaryop_ty o) { @@ -4686,11 +4675,8 @@ PyObject* ast2obj_unaryop(unaryop_ty o) case USub: Py_INCREF(astmodulestate_global->USub_singleton); return astmodulestate_global->USub_singleton; - default: - /* should never happen, but just in case ... */ - PyErr_Format(PyExc_SystemError, "unknown unaryop found"); - return NULL; } + Py_UNREACHABLE(); } PyObject* ast2obj_cmpop(cmpop_ty o) { @@ -4725,11 +4711,8 @@ PyObject* ast2obj_cmpop(cmpop_ty o) case NotIn: Py_INCREF(astmodulestate_global->NotIn_singleton); return astmodulestate_global->NotIn_singleton; - default: - /* should never happen, but just in case ... */ - PyErr_Format(PyExc_SystemError, "unknown cmpop found"); - return NULL; } + Py_UNREACHABLE(); } PyObject* ast2obj_comprehension(void* _o) From webhook-mailer at python.org Wed May 6 12:24:07 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 06 May 2020 16:24:07 -0000 Subject: [Python-checkins] bpo-40521: Disable method cache in subinterpreters (GH-19960) Message-ID: https://github.com/python/cpython/commit/89fc4a34cf7a01df9dd269d32d3706c68a72d130 commit: 89fc4a34cf7a01df9dd269d32d3706c68a72d130 branch: master author: Victor Stinner committer: GitHub date: 2020-05-06T18:23:58+02:00 summary: bpo-40521: Disable method cache in subinterpreters (GH-19960) When Python is built with experimental isolated interpreters, disable the type method cache. Temporary workaround until the cache is made per-interpreter. 
files: M Objects/typeobject.c diff --git a/Objects/typeobject.c b/Objects/typeobject.c index 1565b90898605..0d5600b4ce4fa 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -19,6 +19,12 @@ class object "PyObject *" "&PyBaseObject_Type" #include "clinic/typeobject.c.h" +/* bpo-40521: Type method cache is shared by all subinterpreters */ +#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +# define MCACHE +#endif + +#ifdef MCACHE /* Support type attribute cache */ /* The cache can keep references to the names alive for longer than @@ -47,6 +53,7 @@ struct method_cache_entry { static struct method_cache_entry method_cache[1 << MCACHE_SIZE_EXP]; static unsigned int next_version_tag = 0; +#endif #define MCACHE_STATS 0 @@ -216,6 +223,7 @@ _PyType_GetTextSignatureFromInternalDoc(const char *name, const char *internal_d unsigned int PyType_ClearCache(void) { +#ifdef MCACHE Py_ssize_t i; unsigned int cur_version_tag = next_version_tag - 1; @@ -240,6 +248,9 @@ PyType_ClearCache(void) /* mark all version tags as invalid */ PyType_Modified(&PyBaseObject_Type); return cur_version_tag; +#else + return 0; +#endif } void @@ -350,6 +361,7 @@ type_mro_modified(PyTypeObject *type, PyObject *bases) { Py_TPFLAGS_VALID_VERSION_TAG); } +#ifdef MCACHE static int assign_version_tag(PyTypeObject *type) { @@ -396,6 +408,7 @@ assign_version_tag(PyTypeObject *type) type->tp_flags |= Py_TPFLAGS_VALID_VERSION_TAG; return 1; } +#endif static PyMemberDef type_members[] = { @@ -3232,12 +3245,12 @@ _PyType_Lookup(PyTypeObject *type, PyObject *name) { PyObject *res; int error; - unsigned int h; +#ifdef MCACHE if (MCACHE_CACHEABLE_NAME(name) && _PyType_HasFeature(type, Py_TPFLAGS_VALID_VERSION_TAG)) { /* fast path */ - h = MCACHE_HASH_METHOD(type, name); + unsigned int h = MCACHE_HASH_METHOD(type, name); if (method_cache[h].version == type->tp_version_tag && method_cache[h].name == name) { #if MCACHE_STATS @@ -3246,6 +3259,7 @@ _PyType_Lookup(PyTypeObject *type, PyObject *name) return 
method_cache[h].value; } } +#endif /* We may end up clearing live exceptions below, so make sure it's ours. */ assert(!PyErr_Occurred()); @@ -3267,8 +3281,9 @@ _PyType_Lookup(PyTypeObject *type, PyObject *name) return NULL; } +#ifdef MCACHE if (MCACHE_CACHEABLE_NAME(name) && assign_version_tag(type)) { - h = MCACHE_HASH_METHOD(type, name); + unsigned int h = MCACHE_HASH_METHOD(type, name); method_cache[h].version = type->tp_version_tag; method_cache[h].value = res; /* borrowed */ Py_INCREF(name); @@ -3281,6 +3296,7 @@ _PyType_Lookup(PyTypeObject *type, PyObject *name) #endif Py_SETREF(method_cache[h].name, name); } +#endif return res; } From webhook-mailer at python.org Wed May 6 12:25:11 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 06 May 2020 16:25:11 -0000 Subject: [Python-checkins] bpo-40533: Disable GC in subinterpreters (GH-19961) Message-ID: https://github.com/python/cpython/commit/d8135e913ab7c694db247c86d0a84c450c32d86e commit: d8135e913ab7c694db247c86d0a84c450c32d86e branch: master author: Victor Stinner committer: GitHub date: 2020-05-06T18:25:06+02:00 summary: bpo-40533: Disable GC in subinterpreters (GH-19961) When Python is built with experimental isolated interpreters, a garbage collection now does nothing in an isolated interpreter. Temporary workaround until subinterpreters stop sharing Python objects. files: M Modules/gcmodule.c diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c index 56dcb101e0005..a44752b1cc4da 100644 --- a/Modules/gcmodule.c +++ b/Modules/gcmodule.c @@ -1181,6 +1181,14 @@ collect(PyThreadState *tstate, int generation, _PyTime_t t1 = 0; /* initialize to prevent a compiler warning */ GCState *gcstate = &tstate->interp->gc; +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + if (tstate->interp->config._isolated_interpreter) { + // bpo-40533: The garbage collector must not be run on parallel on + // Python objects shared by multiple interpreters. 
+ return 0; + } +#endif + if (gcstate->debug & DEBUG_STATS) { PySys_WriteStderr("gc: collecting generation %d...\n", generation); show_stats_each_generations(gcstate); From webhook-mailer at python.org Wed May 6 13:05:35 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 06 May 2020 17:05:35 -0000 Subject: [Python-checkins] bpo-40521: Disable list free list in subinterpreters (GH-19959) Message-ID: https://github.com/python/cpython/commit/b7aa23d29fa48238dab3692d02e1f0a7e8a5af9c commit: b7aa23d29fa48238dab3692d02e1f0a7e8a5af9c branch: master author: Victor Stinner committer: GitHub date: 2020-05-06T19:05:27+02:00 summary: bpo-40521: Disable list free list in subinterpreters (GH-19959) When Python is built with experimental isolated interpreters, disable the list free list. Temporary workaround until this cache is made per-interpreter. files: M Objects/listobject.c diff --git a/Objects/listobject.c b/Objects/listobject.c index 904bea317c9da..37fadca129ac0 100644 --- a/Objects/listobject.c +++ b/Objects/listobject.c @@ -98,8 +98,15 @@ list_preallocate_exact(PyListObject *self, Py_ssize_t size) /* Empty list reuse scheme to save calls to malloc and free */ #ifndef PyList_MAXFREELIST -#define PyList_MAXFREELIST 80 +# define PyList_MAXFREELIST 80 #endif + +/* bpo-40521: list free lists are shared by all interpreters. 
*/ +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +# undef PyList_MAXFREELIST +# define PyList_MAXFREELIST 0 +#endif + static PyListObject *free_list[PyList_MAXFREELIST]; static int numfree = 0; From webhook-mailer at python.org Wed May 6 14:11:15 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Wed, 06 May 2020 18:11:15 -0000 Subject: [Python-checkins] bpo-40334: Add type to the assignment rule in the grammar file (GH-19963) Message-ID: https://github.com/python/cpython/commit/999ec9ab6af536cc2666a0847ec02331aaf00416 commit: 999ec9ab6af536cc2666a0847ec02331aaf00416 branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-05-06T19:11:04+01:00 summary: bpo-40334: Add type to the assignment rule in the grammar file (GH-19963) files: M Grammar/python.gram M Parser/pegen/parse.c diff --git a/Grammar/python.gram b/Grammar/python.gram index 0ce6ab4b4ba90..3f16768198f9d 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -82,7 +82,7 @@ compound_stmt[stmt_ty]: | &'while' while_stmt # NOTE: annotated_rhs may start with 'yield'; yield_expr must start with 'yield' -assignment: +assignment[stmt_ty]: | a=NAME ':' b=expression c=['=' d=annotated_rhs { d }] { CHECK_VERSION( 6, diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index 55605d5770f1e..3b518ee263777 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -378,7 +378,7 @@ static asdl_seq* statement_newline_rule(Parser *p); static asdl_seq* simple_stmt_rule(Parser *p); static stmt_ty small_stmt_rule(Parser *p); static stmt_ty compound_stmt_rule(Parser *p); -static void *assignment_rule(Parser *p); +static stmt_ty assignment_rule(Parser *p); static AugOperator* augassign_rule(Parser *p); static stmt_ty global_stmt_rule(Parser *p); static stmt_ty nonlocal_stmt_rule(Parser *p); @@ -1256,7 +1256,7 @@ small_stmt_rule(Parser *p) int start_col_offset = p->tokens[mark]->col_offset; UNUSED(start_col_offset); // Only used by EXTRA macro { // assignment - void *assignment_var; 
+ stmt_ty assignment_var; if ( (assignment_var = assignment_rule(p)) ) @@ -1586,13 +1586,13 @@ compound_stmt_rule(Parser *p) // | ((star_targets '='))+ (yield_expr | star_expressions) TYPE_COMMENT? // | target augassign (yield_expr | star_expressions) // | invalid_assignment -static void * +static stmt_ty assignment_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; + stmt_ty res = NULL; int mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; From webhook-mailer at python.org Wed May 6 15:51:52 2020 From: webhook-mailer at python.org (Naglis) Date: Wed, 06 May 2020 19:51:52 -0000 Subject: [Python-checkins] Fix typo in sqlite3 documentation (GH-19965) Message-ID: https://github.com/python/cpython/commit/441416c9a06f11f28e17d56c915ea6116c0c9ea7 commit: 441416c9a06f11f28e17d56c915ea6116c0c9ea7 branch: master author: Naglis committer: GitHub date: 2020-05-06T16:51:43-03:00 summary: Fix typo in sqlite3 documentation (GH-19965) *first* is repeated twice. files: M Doc/library/sqlite3.rst diff --git a/Doc/library/sqlite3.rst b/Doc/library/sqlite3.rst index 314d3a58e2759..ccb82278bdaa1 100644 --- a/Doc/library/sqlite3.rst +++ b/Doc/library/sqlite3.rst @@ -928,7 +928,7 @@ a class like this:: self.x, self.y = x, y Now you want to store the point in a single SQLite column. First you'll have to -choose one of the supported types first to be used for representing the point. +choose one of the supported types to be used for representing the point. Let's just use str and separate the coordinates using a semicolon. Then you need to give your class a method ``__conform__(self, protocol)`` which must return the converted value. The parameter *protocol* will be :class:`PrepareProtocol`. 
From webhook-mailer at python.org Wed May 6 17:54:42 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Wed, 06 May 2020 21:54:42 -0000 Subject: [Python-checkins] bpo-40334: Allow trailing comma in parenthesised context managers (GH-19964) Message-ID: https://github.com/python/cpython/commit/99db2a1db7a9b468a0ce8377d579f78fa03a2a34 commit: 99db2a1db7a9b468a0ce8377d579f78fa03a2a34 branch: master author: Pablo Galindo committer: GitHub date: 2020-05-06T22:54:34+01:00 summary: bpo-40334: Allow trailing comma in parenthesised context managers (GH-19964) files: M Grammar/python.gram M Lib/test/test_grammar.py M Parser/pegen/parse.c diff --git a/Grammar/python.gram b/Grammar/python.gram index 3f16768198f9d..3d8a39b1d5906 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -170,11 +170,11 @@ for_stmt[stmt_ty]: CHECK_VERSION(5, "Async for loops are", _Py_AsyncFor(t, ex, b, el, NEW_TYPE_COMMENT(p, tc), EXTRA)) } with_stmt[stmt_ty]: - | 'with' '(' a=','.with_item+ ')' ':' b=block { + | 'with' '(' a=','.with_item+ ','? ')' ':' b=block { _Py_With(a, b, NULL, EXTRA) } | 'with' a=','.with_item+ ':' tc=[TYPE_COMMENT] b=block { _Py_With(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA) } - | ASYNC 'with' '(' a=','.with_item+ ')' ':' b=block { + | ASYNC 'with' '(' a=','.with_item+ ','? ')' ':' b=block { CHECK_VERSION(5, "Async with statements are", _Py_AsyncWith(a, b, NULL, EXTRA)) } | ASYNC 'with' a=','.with_item+ ':' tc=[TYPE_COMMENT] b=block { CHECK_VERSION(5, "Async with statements are", _Py_AsyncWith(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA)) } diff --git a/Lib/test/test_grammar.py b/Lib/test/test_grammar.py index 922a5166ec12f..c24d3529490be 100644 --- a/Lib/test/test_grammar.py +++ b/Lib/test/test_grammar.py @@ -1,7 +1,7 @@ # Python test set -- part 1, grammar. # This just tests whether the parser accepts them all. 
-from test.support import check_syntax_error, check_syntax_warning +from test.support import check_syntax_error, check_syntax_warning, use_old_parser import inspect import unittest import sys @@ -1694,6 +1694,70 @@ def __exit__(self, *args): with manager() as x, manager(): pass + if not use_old_parser(): + test_cases = [ + """if 1: + with ( + manager() + ): + pass + """, + """if 1: + with ( + manager() as x + ): + pass + """, + """if 1: + with ( + manager() as (x, y), + manager() as z, + ): + pass + """, + """if 1: + with ( + manager(), + manager() + ): + pass + """, + """if 1: + with ( + manager() as x, + manager() as y + ): + pass + """, + """if 1: + with ( + manager() as x, + manager() + ): + pass + """, + """if 1: + with ( + manager() as x, + manager() as y, + manager() as z, + ): + pass + """, + """if 1: + with ( + manager() as x, + manager() as y, + manager(), + ): + pass + """, + ] + for case in test_cases: + with self.subTest(case=case): + compile(case, "", "exec") + + def test_if_else_expr(self): # Test ifelse expressions in various cases def _checkeval(msg, ret): diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index 3b518ee263777..d86390839d528 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -3031,9 +3031,9 @@ for_stmt_rule(Parser *p) } // with_stmt: -// | 'with' '(' ','.with_item+ ')' ':' block +// | 'with' '(' ','.with_item+ ','? ')' ':' block // | 'with' ','.with_item+ ':' TYPE_COMMENT? block -// | ASYNC 'with' '(' ','.with_item+ ')' ':' block +// | ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block // | ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? block static stmt_ty with_stmt_rule(Parser *p) @@ -3051,13 +3051,15 @@ with_stmt_rule(Parser *p) UNUSED(start_lineno); // Only used by EXTRA macro int start_col_offset = p->tokens[mark]->col_offset; UNUSED(start_col_offset); // Only used by EXTRA macro - { // 'with' '(' ','.with_item+ ')' ':' block + { // 'with' '(' ','.with_item+ ','? 
')' ':' block asdl_seq * a; asdl_seq* b; Token * keyword; Token * literal; Token * literal_1; Token * literal_2; + void *opt_var; + UNUSED(opt_var); // Silence compiler warnings if ( (keyword = _PyPegen_expect_token(p, 519)) && @@ -3065,6 +3067,8 @@ with_stmt_rule(Parser *p) && (a = _gather_38_rule(p)) && + (opt_var = _PyPegen_expect_token(p, 12), 1) + && (literal_1 = _PyPegen_expect_token(p, 8)) && (literal_2 = _PyPegen_expect_token(p, 11)) @@ -3124,7 +3128,7 @@ with_stmt_rule(Parser *p) } p->mark = mark; } - { // ASYNC 'with' '(' ','.with_item+ ')' ':' block + { // ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block asdl_seq * a; Token * async_var; asdl_seq* b; @@ -3132,6 +3136,8 @@ with_stmt_rule(Parser *p) Token * literal; Token * literal_1; Token * literal_2; + void *opt_var; + UNUSED(opt_var); // Silence compiler warnings if ( (async_var = _PyPegen_expect_token(p, ASYNC)) && @@ -3141,6 +3147,8 @@ with_stmt_rule(Parser *p) && (a = _gather_42_rule(p)) && + (opt_var = _PyPegen_expect_token(p, 12), 1) + && (literal_1 = _PyPegen_expect_token(p, 8)) && (literal_2 = _PyPegen_expect_token(p, 11)) From webhook-mailer at python.org Wed May 6 18:14:51 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Wed, 06 May 2020 22:14:51 -0000 Subject: [Python-checkins] bpo-40334: Generate comments in the parser code to improve debugging (GH-19966) Message-ID: https://github.com/python/cpython/commit/470aac4d8e76556bd8f820f3f3928dca2b4d2849 commit: 470aac4d8e76556bd8f820f3f3928dca2b4d2849 branch: master author: Pablo Galindo committer: GitHub date: 2020-05-06T23:14:43+01:00 summary: bpo-40334: Generate comments in the parser code to improve debugging (GH-19966) files: M Parser/pegen/parse.c M Tools/peg_generator/pegen/c_generator.py diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index d86390839d528..b1da16640aa6e 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -678,9 +678,9 @@ file_rule(Parser *p) void *a; Token * endmarker_var; if ( - (a = 
statements_rule(p), 1) + (a = statements_rule(p), 1) // statements? && - (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) + (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' ) { res = _PyPegen_make_module ( p , a ); @@ -709,7 +709,7 @@ interactive_rule(Parser *p) { // statement_newline asdl_seq* a; if ( - (a = statement_newline_rule(p)) + (a = statement_newline_rule(p)) // statement_newline ) { res = Interactive ( a , p -> arena ); @@ -740,11 +740,11 @@ eval_rule(Parser *p) expr_ty a; Token * endmarker_var; if ( - (a = expressions_rule(p)) + (a = expressions_rule(p)) // expressions && - (_loop0_1_var = _loop0_1_rule(p)) + (_loop0_1_var = _loop0_1_rule(p)) // NEWLINE* && - (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) + (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' ) { res = Expression ( a , p -> arena ); @@ -779,19 +779,19 @@ func_type_rule(Parser *p) Token * literal_1; Token * literal_2; if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = type_expressions_rule(p), 1) + (a = type_expressions_rule(p), 1) // type_expressions? 
&& - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && - (literal_2 = _PyPegen_expect_token(p, 51)) + (literal_2 = _PyPegen_expect_token(p, 51)) // token='->' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression && - (_loop0_2_var = _loop0_2_rule(p)) + (_loop0_2_var = _loop0_2_rule(p)) // NEWLINE* && - (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) + (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' ) { res = FunctionType ( a , b , p -> arena ); @@ -820,7 +820,7 @@ fstring_rule(Parser *p) { // star_expressions expr_ty star_expressions_var; if ( - (star_expressions_var = star_expressions_rule(p)) + (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { res = star_expressions_var; @@ -858,19 +858,19 @@ type_expressions_rule(Parser *p) Token * literal_2; Token * literal_3; if ( - (a = _gather_3_rule(p)) + (a = _gather_3_rule(p)) // ','.expression+ && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (literal_1 = _PyPegen_expect_token(p, 16)) + (literal_1 = _PyPegen_expect_token(p, 16)) // token='*' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression && - (literal_2 = _PyPegen_expect_token(p, 12)) + (literal_2 = _PyPegen_expect_token(p, 12)) // token=',' && - (literal_3 = _PyPegen_expect_token(p, 35)) + (literal_3 = _PyPegen_expect_token(p, 35)) // token='**' && - (c = expression_rule(p)) + (c = expression_rule(p)) // expression ) { res = _PyPegen_seq_append_to_end ( p , CHECK ( _PyPegen_seq_append_to_end ( p , a , b ) ) , c ); @@ -888,13 +888,13 @@ type_expressions_rule(Parser *p) Token * literal; Token * literal_1; if ( - (a = _gather_5_rule(p)) + (a = _gather_5_rule(p)) // ','.expression+ && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (literal_1 = _PyPegen_expect_token(p, 16)) + (literal_1 = _PyPegen_expect_token(p, 
16)) // token='*' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression ) { res = _PyPegen_seq_append_to_end ( p , a , b ); @@ -912,13 +912,13 @@ type_expressions_rule(Parser *p) Token * literal; Token * literal_1; if ( - (a = _gather_7_rule(p)) + (a = _gather_7_rule(p)) // ','.expression+ && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (literal_1 = _PyPegen_expect_token(p, 35)) + (literal_1 = _PyPegen_expect_token(p, 35)) // token='**' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression ) { res = _PyPegen_seq_append_to_end ( p , a , b ); @@ -937,15 +937,15 @@ type_expressions_rule(Parser *p) Token * literal_1; Token * literal_2; if ( - (literal = _PyPegen_expect_token(p, 16)) + (literal = _PyPegen_expect_token(p, 16)) // token='*' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression && - (literal_1 = _PyPegen_expect_token(p, 12)) + (literal_1 = _PyPegen_expect_token(p, 12)) // token=',' && - (literal_2 = _PyPegen_expect_token(p, 35)) + (literal_2 = _PyPegen_expect_token(p, 35)) // token='**' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression ) { res = _PyPegen_seq_append_to_end ( p , CHECK ( _PyPegen_singleton_seq ( p , a ) ) , b ); @@ -961,9 +961,9 @@ type_expressions_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) + (literal = _PyPegen_expect_token(p, 16)) // token='*' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression ) { res = _PyPegen_singleton_seq ( p , a ); @@ -979,9 +979,9 @@ type_expressions_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 35)) + (literal = _PyPegen_expect_token(p, 35)) // token='**' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression ) { res = _PyPegen_singleton_seq ( p , a ); @@ -996,7 +996,7 @@ type_expressions_rule(Parser *p) { // ','.expression+ asdl_seq * _gather_9_var; if ( - 
(_gather_9_var = _gather_9_rule(p)) + (_gather_9_var = _gather_9_rule(p)) // ','.expression+ ) { res = _gather_9_var; @@ -1021,7 +1021,7 @@ statements_rule(Parser *p) { // statement+ asdl_seq * a; if ( - (a = _loop1_11_rule(p)) + (a = _loop1_11_rule(p)) // statement+ ) { res = _PyPegen_seq_flatten ( p , a ); @@ -1050,7 +1050,7 @@ statement_rule(Parser *p) { // compound_stmt stmt_ty a; if ( - (a = compound_stmt_rule(p)) + (a = compound_stmt_rule(p)) // compound_stmt ) { res = _PyPegen_singleton_seq ( p , a ); @@ -1065,7 +1065,7 @@ statement_rule(Parser *p) { // simple_stmt asdl_seq* simple_stmt_var; if ( - (simple_stmt_var = simple_stmt_rule(p)) + (simple_stmt_var = simple_stmt_rule(p)) // simple_stmt ) { res = simple_stmt_var; @@ -1099,9 +1099,9 @@ statement_newline_rule(Parser *p) stmt_ty a; Token * newline_var; if ( - (a = compound_stmt_rule(p)) + (a = compound_stmt_rule(p)) // compound_stmt && - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { res = _PyPegen_singleton_seq ( p , a ); @@ -1116,7 +1116,7 @@ statement_newline_rule(Parser *p) { // simple_stmt asdl_seq* simple_stmt_var; if ( - (simple_stmt_var = simple_stmt_rule(p)) + (simple_stmt_var = simple_stmt_rule(p)) // simple_stmt ) { res = simple_stmt_var; @@ -1127,7 +1127,7 @@ statement_newline_rule(Parser *p) { // NEWLINE Token * newline_var; if ( - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -1150,7 +1150,7 @@ statement_newline_rule(Parser *p) { // $ Token * endmarker_var; if ( - (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) + (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' ) { res = _PyPegen_interactive_exit ( p ); @@ -1180,11 +1180,11 @@ simple_stmt_rule(Parser *p) stmt_ty a; Token * newline_var; if ( - (a = small_stmt_rule(p)) + (a = small_stmt_rule(p)) 
// small_stmt && - _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 13) + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 13) // token=';' && - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { res = _PyPegen_singleton_seq ( p , a ); @@ -1202,11 +1202,11 @@ simple_stmt_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (a = _gather_12_rule(p)) + (a = _gather_12_rule(p)) // ';'.small_stmt+ && - (opt_var = _PyPegen_expect_token(p, 13), 1) + (opt_var = _PyPegen_expect_token(p, 13), 1) // ';'? && - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { res = a; @@ -1258,7 +1258,7 @@ small_stmt_rule(Parser *p) { // assignment stmt_ty assignment_var; if ( - (assignment_var = assignment_rule(p)) + (assignment_var = assignment_rule(p)) // assignment ) { res = assignment_var; @@ -1269,7 +1269,7 @@ small_stmt_rule(Parser *p) { // star_expressions expr_ty e; if ( - (e = star_expressions_rule(p)) + (e = star_expressions_rule(p)) // star_expressions ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -1292,9 +1292,9 @@ small_stmt_rule(Parser *p) { // &'return' return_stmt stmt_ty return_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 500) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 500) // token='return' && - (return_stmt_var = return_stmt_rule(p)) + (return_stmt_var = return_stmt_rule(p)) // return_stmt ) { res = return_stmt_var; @@ -1307,7 +1307,7 @@ small_stmt_rule(Parser *p) if ( _PyPegen_lookahead(1, _tmp_14_rule, p) && - (import_stmt_var = import_stmt_rule(p)) + (import_stmt_var = import_stmt_rule(p)) // import_stmt ) { res = import_stmt_var; @@ -1318,9 +1318,9 @@ small_stmt_rule(Parser *p) { // &'raise' raise_stmt stmt_ty raise_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 501) + 
_PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 501) // token='raise' && - (raise_stmt_var = raise_stmt_rule(p)) + (raise_stmt_var = raise_stmt_rule(p)) // raise_stmt ) { res = raise_stmt_var; @@ -1331,7 +1331,7 @@ small_stmt_rule(Parser *p) { // 'pass' Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 502)) + (keyword = _PyPegen_expect_token(p, 502)) // token='pass' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -1354,9 +1354,9 @@ small_stmt_rule(Parser *p) { // &'del' del_stmt stmt_ty del_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 503) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 503) // token='del' && - (del_stmt_var = del_stmt_rule(p)) + (del_stmt_var = del_stmt_rule(p)) // del_stmt ) { res = del_stmt_var; @@ -1367,9 +1367,9 @@ small_stmt_rule(Parser *p) { // &'yield' yield_stmt stmt_ty yield_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 504) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 504) // token='yield' && - (yield_stmt_var = yield_stmt_rule(p)) + (yield_stmt_var = yield_stmt_rule(p)) // yield_stmt ) { res = yield_stmt_var; @@ -1380,9 +1380,9 @@ small_stmt_rule(Parser *p) { // &'assert' assert_stmt stmt_ty assert_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 505) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 505) // token='assert' && - (assert_stmt_var = assert_stmt_rule(p)) + (assert_stmt_var = assert_stmt_rule(p)) // assert_stmt ) { res = assert_stmt_var; @@ -1393,7 +1393,7 @@ small_stmt_rule(Parser *p) { // 'break' Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 506)) + (keyword = _PyPegen_expect_token(p, 506)) // token='break' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -1416,7 +1416,7 @@ small_stmt_rule(Parser *p) { // 'continue' Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 507)) + (keyword = _PyPegen_expect_token(p, 507)) // token='continue' 
) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -1439,9 +1439,9 @@ small_stmt_rule(Parser *p) { // &'global' global_stmt stmt_ty global_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 508) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 508) // token='global' && - (global_stmt_var = global_stmt_rule(p)) + (global_stmt_var = global_stmt_rule(p)) // global_stmt ) { res = global_stmt_var; @@ -1452,9 +1452,9 @@ small_stmt_rule(Parser *p) { // &'nonlocal' nonlocal_stmt stmt_ty nonlocal_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 509) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 509) // token='nonlocal' && - (nonlocal_stmt_var = nonlocal_stmt_rule(p)) + (nonlocal_stmt_var = nonlocal_stmt_rule(p)) // nonlocal_stmt ) { res = nonlocal_stmt_var; @@ -1489,7 +1489,7 @@ compound_stmt_rule(Parser *p) if ( _PyPegen_lookahead(1, _tmp_15_rule, p) && - (function_def_var = function_def_rule(p)) + (function_def_var = function_def_rule(p)) // function_def ) { res = function_def_var; @@ -1500,9 +1500,9 @@ compound_stmt_rule(Parser *p) { // &'if' if_stmt stmt_ty if_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 510) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 510) // token='if' && - (if_stmt_var = if_stmt_rule(p)) + (if_stmt_var = if_stmt_rule(p)) // if_stmt ) { res = if_stmt_var; @@ -1515,7 +1515,7 @@ compound_stmt_rule(Parser *p) if ( _PyPegen_lookahead(1, _tmp_16_rule, p) && - (class_def_var = class_def_rule(p)) + (class_def_var = class_def_rule(p)) // class_def ) { res = class_def_var; @@ -1528,7 +1528,7 @@ compound_stmt_rule(Parser *p) if ( _PyPegen_lookahead(1, _tmp_17_rule, p) && - (with_stmt_var = with_stmt_rule(p)) + (with_stmt_var = with_stmt_rule(p)) // with_stmt ) { res = with_stmt_var; @@ -1541,7 +1541,7 @@ compound_stmt_rule(Parser *p) if ( _PyPegen_lookahead(1, _tmp_18_rule, p) && - (for_stmt_var = for_stmt_rule(p)) + (for_stmt_var = 
for_stmt_rule(p)) // for_stmt ) { res = for_stmt_var; @@ -1552,9 +1552,9 @@ compound_stmt_rule(Parser *p) { // &'try' try_stmt stmt_ty try_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 511) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 511) // token='try' && - (try_stmt_var = try_stmt_rule(p)) + (try_stmt_var = try_stmt_rule(p)) // try_stmt ) { res = try_stmt_var; @@ -1565,9 +1565,9 @@ compound_stmt_rule(Parser *p) { // &'while' while_stmt stmt_ty while_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 512) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 512) // token='while' && - (while_stmt_var = while_stmt_rule(p)) + (while_stmt_var = while_stmt_rule(p)) // while_stmt ) { res = while_stmt_var; @@ -1608,13 +1608,13 @@ assignment_rule(Parser *p) void *c; Token * literal; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression && - (c = _tmp_19_rule(p), 1) + (c = _tmp_19_rule(p), 1) // ['=' annotated_rhs] ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -1640,13 +1640,13 @@ assignment_rule(Parser *p) void *c; Token * literal; if ( - (a = _tmp_20_rule(p)) + (a = _tmp_20_rule(p)) // '(' inside_paren_ann_assign_target ')' | ann_assign_subscript_attribute_target && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression && - (c = _tmp_21_rule(p), 1) + (c = _tmp_21_rule(p), 1) // ['=' annotated_rhs] ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -1671,11 +1671,11 @@ assignment_rule(Parser *p) void *b; void *tc; if ( - (a = _loop1_22_rule(p)) + (a = _loop1_22_rule(p)) // ((star_targets '='))+ && - (b = _tmp_23_rule(p)) + (b = _tmp_23_rule(p)) // yield_expr | 
star_expressions && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -1700,11 +1700,11 @@ assignment_rule(Parser *p) AugOperator* b; void *c; if ( - (a = target_rule(p)) + (a = target_rule(p)) // target && - (b = augassign_rule(p)) + (b = augassign_rule(p)) // augassign && - (c = _tmp_24_rule(p)) + (c = _tmp_24_rule(p)) // yield_expr | star_expressions ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -1727,7 +1727,7 @@ assignment_rule(Parser *p) { // invalid_assignment void *invalid_assignment_var; if ( - (invalid_assignment_var = invalid_assignment_rule(p)) + (invalid_assignment_var = invalid_assignment_rule(p)) // invalid_assignment ) { res = invalid_assignment_var; @@ -1765,7 +1765,7 @@ augassign_rule(Parser *p) { // '+=' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 36)) + (literal = _PyPegen_expect_token(p, 36)) // token='+=' ) { res = _PyPegen_augoperator ( p , Add ); @@ -1780,7 +1780,7 @@ augassign_rule(Parser *p) { // '-=' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 37)) + (literal = _PyPegen_expect_token(p, 37)) // token='-=' ) { res = _PyPegen_augoperator ( p , Sub ); @@ -1795,7 +1795,7 @@ augassign_rule(Parser *p) { // '*=' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 38)) + (literal = _PyPegen_expect_token(p, 38)) // token='*=' ) { res = _PyPegen_augoperator ( p , Mult ); @@ -1810,7 +1810,7 @@ augassign_rule(Parser *p) { // '@=' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 50)) + (literal = _PyPegen_expect_token(p, 50)) // token='@=' ) { res = CHECK_VERSION ( 5 , "The '@' operator is" , _PyPegen_augoperator ( p , MatMult ) ); @@ -1825,7 +1825,7 @@ augassign_rule(Parser *p) { // '/=' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 39)) + (literal = _PyPegen_expect_token(p, 39)) // token='/=' ) { res = _PyPegen_augoperator ( p , Div ); 
@@ -1840,7 +1840,7 @@ augassign_rule(Parser *p) { // '%=' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 40)) + (literal = _PyPegen_expect_token(p, 40)) // token='%=' ) { res = _PyPegen_augoperator ( p , Mod ); @@ -1855,7 +1855,7 @@ augassign_rule(Parser *p) { // '&=' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 41)) + (literal = _PyPegen_expect_token(p, 41)) // token='&=' ) { res = _PyPegen_augoperator ( p , BitAnd ); @@ -1870,7 +1870,7 @@ augassign_rule(Parser *p) { // '|=' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 42)) + (literal = _PyPegen_expect_token(p, 42)) // token='|=' ) { res = _PyPegen_augoperator ( p , BitOr ); @@ -1885,7 +1885,7 @@ augassign_rule(Parser *p) { // '^=' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 43)) + (literal = _PyPegen_expect_token(p, 43)) // token='^=' ) { res = _PyPegen_augoperator ( p , BitXor ); @@ -1900,7 +1900,7 @@ augassign_rule(Parser *p) { // '<<=' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 44)) + (literal = _PyPegen_expect_token(p, 44)) // token='<<=' ) { res = _PyPegen_augoperator ( p , LShift ); @@ -1915,7 +1915,7 @@ augassign_rule(Parser *p) { // '>>=' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 45)) + (literal = _PyPegen_expect_token(p, 45)) // token='>>=' ) { res = _PyPegen_augoperator ( p , RShift ); @@ -1930,7 +1930,7 @@ augassign_rule(Parser *p) { // '**=' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 46)) + (literal = _PyPegen_expect_token(p, 46)) // token='**=' ) { res = _PyPegen_augoperator ( p , Pow ); @@ -1945,7 +1945,7 @@ augassign_rule(Parser *p) { // '//=' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 48)) + (literal = _PyPegen_expect_token(p, 48)) // token='//=' ) { res = _PyPegen_augoperator ( p , FloorDiv ); @@ -1983,9 +1983,9 @@ global_stmt_rule(Parser *p) asdl_seq * a; Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 508)) + (keyword = _PyPegen_expect_token(p, 508)) // 
token='global' && - (a = _gather_25_rule(p)) + (a = _gather_25_rule(p)) // ','.NAME+ ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -2031,9 +2031,9 @@ nonlocal_stmt_rule(Parser *p) asdl_seq * a; Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 509)) + (keyword = _PyPegen_expect_token(p, 509)) // token='nonlocal' && - (a = _gather_27_rule(p)) + (a = _gather_27_rule(p)) // ','.NAME+ ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -2078,7 +2078,7 @@ yield_stmt_rule(Parser *p) { // yield_expr expr_ty y; if ( - (y = yield_expr_rule(p)) + (y = yield_expr_rule(p)) // yield_expr ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -2125,11 +2125,11 @@ assert_stmt_rule(Parser *p) void *b; Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 505)) + (keyword = _PyPegen_expect_token(p, 505)) // token='assert' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression && - (b = _tmp_29_rule(p), 1) + (b = _tmp_29_rule(p), 1) // [',' expression] ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -2175,9 +2175,9 @@ del_stmt_rule(Parser *p) asdl_seq* a; Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 503)) + (keyword = _PyPegen_expect_token(p, 503)) // token='del' && - (a = del_targets_rule(p)) + (a = del_targets_rule(p)) // del_targets ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -2214,7 +2214,7 @@ import_stmt_rule(Parser *p) { // import_name stmt_ty import_name_var; if ( - (import_name_var = import_name_rule(p)) + (import_name_var = import_name_rule(p)) // import_name ) { res = import_name_var; @@ -2225,7 +2225,7 @@ import_stmt_rule(Parser *p) { // import_from stmt_ty import_from_var; if ( - (import_from_var = import_from_rule(p)) + (import_from_var = import_from_rule(p)) // import_from ) { res = import_from_var; @@ -2259,9 +2259,9 @@ import_name_rule(Parser *p) asdl_seq* a; Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 513)) + (keyword = 
_PyPegen_expect_token(p, 513)) // token='import' && - (a = dotted_as_names_rule(p)) + (a = dotted_as_names_rule(p)) // dotted_as_names ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -2312,15 +2312,15 @@ import_from_rule(Parser *p) Token * keyword; Token * keyword_1; if ( - (keyword = _PyPegen_expect_token(p, 514)) + (keyword = _PyPegen_expect_token(p, 514)) // token='from' && - (a = _loop0_30_rule(p)) + (a = _loop0_30_rule(p)) // (('.' | '...'))* && - (b = dotted_name_rule(p)) + (b = dotted_name_rule(p)) // dotted_name && - (keyword_1 = _PyPegen_expect_token(p, 513)) + (keyword_1 = _PyPegen_expect_token(p, 513)) // token='import' && - (c = import_from_targets_rule(p)) + (c = import_from_targets_rule(p)) // import_from_targets ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -2346,13 +2346,13 @@ import_from_rule(Parser *p) Token * keyword; Token * keyword_1; if ( - (keyword = _PyPegen_expect_token(p, 514)) + (keyword = _PyPegen_expect_token(p, 514)) // token='from' && - (a = _loop1_31_rule(p)) + (a = _loop1_31_rule(p)) // (('.' | '...'))+ && - (keyword_1 = _PyPegen_expect_token(p, 513)) + (keyword_1 = _PyPegen_expect_token(p, 513)) // token='import' && - (b = import_from_targets_rule(p)) + (b = import_from_targets_rule(p)) // import_from_targets ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -2393,13 +2393,13 @@ import_from_targets_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = import_from_as_names_rule(p)) + (a = import_from_as_names_rule(p)) // import_from_as_names && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
&& - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { res = a; @@ -2414,7 +2414,7 @@ import_from_targets_rule(Parser *p) { // import_from_as_names asdl_seq* import_from_as_names_var; if ( - (import_from_as_names_var = import_from_as_names_rule(p)) + (import_from_as_names_var = import_from_as_names_rule(p)) // import_from_as_names ) { res = import_from_as_names_var; @@ -2425,7 +2425,7 @@ import_from_targets_rule(Parser *p) { // '*' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) + (literal = _PyPegen_expect_token(p, 16)) // token='*' ) { res = _PyPegen_singleton_seq ( p , CHECK ( _PyPegen_alias_for_star ( p ) ) ); @@ -2454,7 +2454,7 @@ import_from_as_names_rule(Parser *p) { // ','.import_from_as_name+ asdl_seq * a; if ( - (a = _gather_32_rule(p)) + (a = _gather_32_rule(p)) // ','.import_from_as_name+ ) { res = a; @@ -2484,9 +2484,9 @@ import_from_as_name_rule(Parser *p) expr_ty a; void *b; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME && - (b = _tmp_34_rule(p), 1) + (b = _tmp_34_rule(p), 1) // ['as' NAME] ) { res = _Py_alias ( a -> v . Name . id , ( b ) ? ( ( expr_ty ) b ) -> v . Name . id : NULL , p -> arena ); @@ -2515,7 +2515,7 @@ dotted_as_names_rule(Parser *p) { // ','.dotted_as_name+ asdl_seq * a; if ( - (a = _gather_35_rule(p)) + (a = _gather_35_rule(p)) // ','.dotted_as_name+ ) { res = a; @@ -2545,9 +2545,9 @@ dotted_as_name_rule(Parser *p) expr_ty a; void *b; if ( - (a = dotted_name_rule(p)) + (a = dotted_name_rule(p)) // dotted_name && - (b = _tmp_37_rule(p), 1) + (b = _tmp_37_rule(p), 1) // ['as' NAME] ) { res = _Py_alias ( a -> v . Name . id , ( b ) ? ( ( expr_ty ) b ) -> v . Name . 
id : NULL , p -> arena ); @@ -2603,11 +2603,11 @@ dotted_name_raw(Parser *p) expr_ty b; Token * literal; if ( - (a = dotted_name_rule(p)) + (a = dotted_name_rule(p)) // dotted_name && - (literal = _PyPegen_expect_token(p, 23)) + (literal = _PyPegen_expect_token(p, 23)) // token='.' && - (b = _PyPegen_name_token(p)) + (b = _PyPegen_name_token(p)) // NAME ) { res = _PyPegen_join_names_with_dot ( p , a , b ); @@ -2622,7 +2622,7 @@ dotted_name_raw(Parser *p) { // NAME expr_ty name_var; if ( - (name_var = _PyPegen_name_token(p)) + (name_var = _PyPegen_name_token(p)) // NAME ) { res = name_var; @@ -2661,15 +2661,15 @@ if_stmt_rule(Parser *p) Token * keyword; Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 510)) + (keyword = _PyPegen_expect_token(p, 510)) // token='if' && - (a = named_expression_rule(p)) + (a = named_expression_rule(p)) // named_expression && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block && - (c = elif_stmt_rule(p)) + (c = elif_stmt_rule(p)) // elif_stmt ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -2696,15 +2696,15 @@ if_stmt_rule(Parser *p) Token * keyword; Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 510)) + (keyword = _PyPegen_expect_token(p, 510)) // token='if' && - (a = named_expression_rule(p)) + (a = named_expression_rule(p)) // named_expression && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block && - (c = else_block_rule(p), 1) + (c = else_block_rule(p), 1) // else_block? 
) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -2755,15 +2755,15 @@ elif_stmt_rule(Parser *p) Token * keyword; Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 515)) + (keyword = _PyPegen_expect_token(p, 515)) // token='elif' && - (a = named_expression_rule(p)) + (a = named_expression_rule(p)) // named_expression && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block && - (c = elif_stmt_rule(p)) + (c = elif_stmt_rule(p)) // elif_stmt ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -2790,15 +2790,15 @@ elif_stmt_rule(Parser *p) Token * keyword; Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 515)) + (keyword = _PyPegen_expect_token(p, 515)) // token='elif' && - (a = named_expression_rule(p)) + (a = named_expression_rule(p)) // named_expression && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block && - (c = else_block_rule(p), 1) + (c = else_block_rule(p), 1) // else_block? 
) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -2837,11 +2837,11 @@ else_block_rule(Parser *p) Token * keyword; Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 516)) + (keyword = _PyPegen_expect_token(p, 516)) // token='else' && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block ) { res = b; @@ -2882,15 +2882,15 @@ while_stmt_rule(Parser *p) Token * keyword; Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 512)) + (keyword = _PyPegen_expect_token(p, 512)) // token='while' && - (a = named_expression_rule(p)) + (a = named_expression_rule(p)) // named_expression && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block && - (c = else_block_rule(p), 1) + (c = else_block_rule(p), 1) // else_block? ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -2944,21 +2944,21 @@ for_stmt_rule(Parser *p) expr_ty t; void *tc; if ( - (keyword = _PyPegen_expect_token(p, 517)) + (keyword = _PyPegen_expect_token(p, 517)) // token='for' && - (t = star_targets_rule(p)) + (t = star_targets_rule(p)) // star_targets && - (keyword_1 = _PyPegen_expect_token(p, 518)) + (keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' && - (ex = star_expressions_rule(p)) + (ex = star_expressions_rule(p)) // star_expressions && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? && - (b = block_rule(p)) + (b = block_rule(p)) // block && - (el = else_block_rule(p), 1) + (el = else_block_rule(p), 1) // else_block? 
) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -2989,23 +2989,23 @@ for_stmt_rule(Parser *p) expr_ty t; void *tc; if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (keyword = _PyPegen_expect_token(p, 517)) + (keyword = _PyPegen_expect_token(p, 517)) // token='for' && - (t = star_targets_rule(p)) + (t = star_targets_rule(p)) // star_targets && - (keyword_1 = _PyPegen_expect_token(p, 518)) + (keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' && - (ex = star_expressions_rule(p)) + (ex = star_expressions_rule(p)) // star_expressions && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? && - (b = block_rule(p)) + (b = block_rule(p)) // block && - (el = else_block_rule(p), 1) + (el = else_block_rule(p), 1) // else_block? ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -3061,19 +3061,19 @@ with_stmt_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (keyword = _PyPegen_expect_token(p, 519)) + (keyword = _PyPegen_expect_token(p, 519)) // token='with' && - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = _gather_38_rule(p)) + (a = _gather_38_rule(p)) // ','.with_item+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
&& - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && - (literal_2 = _PyPegen_expect_token(p, 11)) + (literal_2 = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -3100,15 +3100,15 @@ with_stmt_rule(Parser *p) Token * literal; void *tc; if ( - (keyword = _PyPegen_expect_token(p, 519)) + (keyword = _PyPegen_expect_token(p, 519)) // token='with' && - (a = _gather_40_rule(p)) + (a = _gather_40_rule(p)) // ','.with_item+ && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? && - (b = block_rule(p)) + (b = block_rule(p)) // block ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -3139,21 +3139,21 @@ with_stmt_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (keyword = _PyPegen_expect_token(p, 519)) + (keyword = _PyPegen_expect_token(p, 519)) // token='with' && - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = _gather_42_rule(p)) + (a = _gather_42_rule(p)) // ','.with_item+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
&& - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && - (literal_2 = _PyPegen_expect_token(p, 11)) + (literal_2 = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -3181,17 +3181,17 @@ with_stmt_rule(Parser *p) Token * literal; void *tc; if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (keyword = _PyPegen_expect_token(p, 519)) + (keyword = _PyPegen_expect_token(p, 519)) // token='with' && - (a = _gather_44_rule(p)) + (a = _gather_44_rule(p)) // ','.with_item+ && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? && - (b = block_rule(p)) + (b = block_rule(p)) // block ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -3229,9 +3229,9 @@ with_item_rule(Parser *p) expr_ty e; void *o; if ( - (e = expression_rule(p)) + (e = expression_rule(p)) // expression && - (o = _tmp_46_rule(p), 1) + (o = _tmp_46_rule(p), 1) // ['as' target] ) { res = _Py_withitem ( e , o , p -> arena ); @@ -3273,13 +3273,13 @@ try_stmt_rule(Parser *p) Token * keyword; Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 511)) + (keyword = _PyPegen_expect_token(p, 511)) // token='try' && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block && - (f = finally_block_rule(p)) + (f = finally_block_rule(p)) // finally_block ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -3307,17 +3307,17 @@ try_stmt_rule(Parser *p) Token * keyword; Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 511)) + (keyword = _PyPegen_expect_token(p, 511)) // token='try' && - 
(literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block && - (ex = _loop1_47_rule(p)) + (ex = _loop1_47_rule(p)) // except_block+ && - (el = else_block_rule(p), 1) + (el = else_block_rule(p), 1) // else_block? && - (f = finally_block_rule(p), 1) + (f = finally_block_rule(p), 1) // finally_block? ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -3366,15 +3366,15 @@ except_block_rule(Parser *p) Token * literal; void *t; if ( - (keyword = _PyPegen_expect_token(p, 520)) + (keyword = _PyPegen_expect_token(p, 520)) // token='except' && - (e = expression_rule(p)) + (e = expression_rule(p)) // expression && - (t = _tmp_48_rule(p), 1) + (t = _tmp_48_rule(p), 1) // ['as' target] && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -3399,11 +3399,11 @@ except_block_rule(Parser *p) Token * keyword; Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 520)) + (keyword = _PyPegen_expect_token(p, 520)) // token='except' && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -3442,11 +3442,11 @@ finally_block_rule(Parser *p) Token * keyword; Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 521)) + (keyword = _PyPegen_expect_token(p, 521)) // token='finally' && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (a = block_rule(p)) + (a = block_rule(p)) // block ) { res = a; @@ -3484,9 +3484,9 @@ return_stmt_rule(Parser *p) void *a; Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 500)) + (keyword = _PyPegen_expect_token(p, 500)) // token='return' && - (a = 
star_expressions_rule(p), 1) + (a = star_expressions_rule(p), 1) // star_expressions? ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -3533,11 +3533,11 @@ raise_stmt_rule(Parser *p) void *b; Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 501)) + (keyword = _PyPegen_expect_token(p, 501)) // token='raise' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression && - (b = _tmp_49_rule(p), 1) + (b = _tmp_49_rule(p), 1) // ['from' expression] ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -3560,7 +3560,7 @@ raise_stmt_rule(Parser *p) { // 'raise' Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 501)) + (keyword = _PyPegen_expect_token(p, 501)) // token='raise' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -3598,9 +3598,9 @@ function_def_rule(Parser *p) asdl_seq* d; stmt_ty f; if ( - (d = decorators_rule(p)) + (d = decorators_rule(p)) // decorators && - (f = function_def_raw_rule(p)) + (f = function_def_raw_rule(p)) // function_def_raw ) { res = _PyPegen_function_def_decorators ( p , d , f ); @@ -3615,7 +3615,7 @@ function_def_rule(Parser *p) { // function_def_raw stmt_ty function_def_raw_var; if ( - (function_def_raw_var = function_def_raw_rule(p)) + (function_def_raw_var = function_def_raw_rule(p)) // function_def_raw ) { res = function_def_raw_var; @@ -3658,23 +3658,23 @@ function_def_raw_rule(Parser *p) void *params; void *tc; if ( - (keyword = _PyPegen_expect_token(p, 522)) + (keyword = _PyPegen_expect_token(p, 522)) // token='def' && - (n = _PyPegen_name_token(p)) + (n = _PyPegen_name_token(p)) // NAME && - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (params = params_rule(p), 1) + (params = params_rule(p), 1) // params? 
&& - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && - (a = _tmp_50_rule(p), 1) + (a = _tmp_50_rule(p), 1) // ['->' expression] && - (literal_2 = _PyPegen_expect_token(p, 11)) + (literal_2 = _PyPegen_expect_token(p, 11)) // token=':' && - (tc = func_type_comment_rule(p), 1) + (tc = func_type_comment_rule(p), 1) // func_type_comment? && - (b = block_rule(p)) + (b = block_rule(p)) // block ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -3706,25 +3706,25 @@ function_def_raw_rule(Parser *p) void *params; void *tc; if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (keyword = _PyPegen_expect_token(p, 522)) + (keyword = _PyPegen_expect_token(p, 522)) // token='def' && - (n = _PyPegen_name_token(p)) + (n = _PyPegen_name_token(p)) // NAME && - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (params = params_rule(p), 1) + (params = params_rule(p), 1) // params? && - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && - (a = _tmp_51_rule(p), 1) + (a = _tmp_51_rule(p), 1) // ['->' expression] && - (literal_2 = _PyPegen_expect_token(p, 11)) + (literal_2 = _PyPegen_expect_token(p, 11)) // token=':' && - (tc = func_type_comment_rule(p), 1) + (tc = func_type_comment_rule(p), 1) // func_type_comment? 
&& - (b = block_rule(p)) + (b = block_rule(p)) // block ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -3765,9 +3765,9 @@ func_type_comment_rule(Parser *p) Token * newline_var; Token * t; if ( - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' && - (t = _PyPegen_expect_token(p, TYPE_COMMENT)) + (t = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' && _PyPegen_lookahead(1, _tmp_52_rule, p) ) @@ -3784,7 +3784,7 @@ func_type_comment_rule(Parser *p) { // invalid_double_type_comments void *invalid_double_type_comments_var; if ( - (invalid_double_type_comments_var = invalid_double_type_comments_rule(p)) + (invalid_double_type_comments_var = invalid_double_type_comments_rule(p)) // invalid_double_type_comments ) { res = invalid_double_type_comments_var; @@ -3795,7 +3795,7 @@ func_type_comment_rule(Parser *p) { // TYPE_COMMENT Token * type_comment_var; if ( - (type_comment_var = _PyPegen_expect_token(p, TYPE_COMMENT)) + (type_comment_var = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' ) { res = type_comment_var; @@ -3820,7 +3820,7 @@ params_rule(Parser *p) { // invalid_parameters void *invalid_parameters_var; if ( - (invalid_parameters_var = invalid_parameters_rule(p)) + (invalid_parameters_var = invalid_parameters_rule(p)) // invalid_parameters ) { res = invalid_parameters_var; @@ -3831,7 +3831,7 @@ params_rule(Parser *p) { // parameters arguments_ty parameters_var; if ( - (parameters_var = parameters_rule(p)) + (parameters_var = parameters_rule(p)) // parameters ) { res = parameters_var; @@ -3864,13 +3864,13 @@ parameters_rule(Parser *p) asdl_seq * c; void *d; if ( - (a = slash_no_default_rule(p)) + (a = slash_no_default_rule(p)) // slash_no_default && - (b = _loop0_53_rule(p)) + (b = _loop0_53_rule(p)) // param_no_default* && - (c = _loop0_54_rule(p)) + (c = _loop0_54_rule(p)) // param_with_default* && - (d = star_etc_rule(p), 1) + (d = 
star_etc_rule(p), 1) // star_etc? ) { res = _PyPegen_make_arguments ( p , a , NULL , b , c , d ); @@ -3887,11 +3887,11 @@ parameters_rule(Parser *p) asdl_seq * b; void *c; if ( - (a = slash_with_default_rule(p)) + (a = slash_with_default_rule(p)) // slash_with_default && - (b = _loop0_55_rule(p)) + (b = _loop0_55_rule(p)) // param_with_default* && - (c = star_etc_rule(p), 1) + (c = star_etc_rule(p), 1) // star_etc? ) { res = _PyPegen_make_arguments ( p , NULL , a , NULL , b , c ); @@ -3908,11 +3908,11 @@ parameters_rule(Parser *p) asdl_seq * b; void *c; if ( - (a = _loop1_56_rule(p)) + (a = _loop1_56_rule(p)) // param_no_default+ && - (b = _loop0_57_rule(p)) + (b = _loop0_57_rule(p)) // param_with_default* && - (c = star_etc_rule(p), 1) + (c = star_etc_rule(p), 1) // star_etc? ) { res = _PyPegen_make_arguments ( p , NULL , NULL , a , b , c ); @@ -3928,9 +3928,9 @@ parameters_rule(Parser *p) asdl_seq * a; void *b; if ( - (a = _loop1_58_rule(p)) + (a = _loop1_58_rule(p)) // param_with_default+ && - (b = star_etc_rule(p), 1) + (b = star_etc_rule(p), 1) // star_etc? 
) { res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , a , b ); @@ -3945,7 +3945,7 @@ parameters_rule(Parser *p) { // star_etc StarEtc* a; if ( - (a = star_etc_rule(p)) + (a = star_etc_rule(p)) // star_etc ) { res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , NULL , a ); @@ -3976,11 +3976,11 @@ slash_no_default_rule(Parser *p) Token * literal; Token * literal_1; if ( - (a = _loop1_59_rule(p)) + (a = _loop1_59_rule(p)) // param_no_default+ && - (literal = _PyPegen_expect_token(p, 17)) + (literal = _PyPegen_expect_token(p, 17)) // token='/' && - (literal_1 = _PyPegen_expect_token(p, 12)) + (literal_1 = _PyPegen_expect_token(p, 12)) // token=',' ) { res = a; @@ -3996,11 +3996,11 @@ slash_no_default_rule(Parser *p) asdl_seq * a; Token * literal; if ( - (a = _loop1_60_rule(p)) + (a = _loop1_60_rule(p)) // param_no_default+ && - (literal = _PyPegen_expect_token(p, 17)) + (literal = _PyPegen_expect_token(p, 17)) // token='/' && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { res = a; @@ -4034,13 +4034,13 @@ slash_with_default_rule(Parser *p) Token * literal; Token * literal_1; if ( - (a = _loop0_61_rule(p)) + (a = _loop0_61_rule(p)) // param_no_default* && - (b = _loop1_62_rule(p)) + (b = _loop1_62_rule(p)) // param_with_default+ && - (literal = _PyPegen_expect_token(p, 17)) + (literal = _PyPegen_expect_token(p, 17)) // token='/' && - (literal_1 = _PyPegen_expect_token(p, 12)) + (literal_1 = _PyPegen_expect_token(p, 12)) // token=',' ) { res = _PyPegen_slash_with_default ( p , a , b ); @@ -4057,13 +4057,13 @@ slash_with_default_rule(Parser *p) asdl_seq * b; Token * literal; if ( - (a = _loop0_63_rule(p)) + (a = _loop0_63_rule(p)) // param_no_default* && - (b = _loop1_64_rule(p)) + (b = _loop1_64_rule(p)) // param_with_default+ && - (literal = _PyPegen_expect_token(p, 17)) + (literal = _PyPegen_expect_token(p, 17)) // token='/' && - _PyPegen_lookahead_with_int(1, 
_PyPegen_expect_token, p, 8) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { res = _PyPegen_slash_with_default ( p , a , b ); @@ -4099,13 +4099,13 @@ star_etc_rule(Parser *p) void *c; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) + (literal = _PyPegen_expect_token(p, 16)) // token='*' && - (a = param_no_default_rule(p)) + (a = param_no_default_rule(p)) // param_no_default && - (b = _loop0_65_rule(p)) + (b = _loop0_65_rule(p)) // param_maybe_default* && - (c = kwds_rule(p), 1) + (c = kwds_rule(p), 1) // kwds? ) { res = _PyPegen_star_etc ( p , a , b , c ); @@ -4123,13 +4123,13 @@ star_etc_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 16)) + (literal = _PyPegen_expect_token(p, 16)) // token='*' && - (literal_1 = _PyPegen_expect_token(p, 12)) + (literal_1 = _PyPegen_expect_token(p, 12)) // token=',' && - (b = _loop1_66_rule(p)) + (b = _loop1_66_rule(p)) // param_maybe_default+ && - (c = kwds_rule(p), 1) + (c = kwds_rule(p), 1) // kwds? 
) { res = _PyPegen_star_etc ( p , NULL , b , c ); @@ -4144,7 +4144,7 @@ star_etc_rule(Parser *p) { // kwds arg_ty a; if ( - (a = kwds_rule(p)) + (a = kwds_rule(p)) // kwds ) { res = _PyPegen_star_etc ( p , NULL , NULL , a ); @@ -4159,7 +4159,7 @@ star_etc_rule(Parser *p) { // invalid_star_etc void *invalid_star_etc_var; if ( - (invalid_star_etc_var = invalid_star_etc_rule(p)) + (invalid_star_etc_var = invalid_star_etc_rule(p)) // invalid_star_etc ) { res = invalid_star_etc_var; @@ -4185,9 +4185,9 @@ kwds_rule(Parser *p) arg_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 35)) + (literal = _PyPegen_expect_token(p, 35)) // token='**' && - (a = param_no_default_rule(p)) + (a = param_no_default_rule(p)) // param_no_default ) { res = a; @@ -4218,11 +4218,11 @@ param_no_default_rule(Parser *p) Token * literal; void *tc; if ( - (a = param_rule(p)) + (a = param_rule(p)) // param && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? ) { res = _PyPegen_add_type_comment_to_arg ( p , a , tc ); @@ -4238,11 +4238,11 @@ param_no_default_rule(Parser *p) arg_ty a; void *tc; if ( - (a = param_rule(p)) + (a = param_rule(p)) // param && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? 
&& - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { res = _PyPegen_add_type_comment_to_arg ( p , a , tc ); @@ -4274,13 +4274,13 @@ param_with_default_rule(Parser *p) Token * literal; void *tc; if ( - (a = param_rule(p)) + (a = param_rule(p)) // param && - (c = default_rule(p)) + (c = default_rule(p)) // default && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? ) { res = _PyPegen_name_default_pair ( p , a , c , tc ); @@ -4297,13 +4297,13 @@ param_with_default_rule(Parser *p) expr_ty c; void *tc; if ( - (a = param_rule(p)) + (a = param_rule(p)) // param && - (c = default_rule(p)) + (c = default_rule(p)) // default && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { res = _PyPegen_name_default_pair ( p , a , c , tc ); @@ -4337,13 +4337,13 @@ param_maybe_default_rule(Parser *p) Token * literal; void *tc; if ( - (a = param_rule(p)) + (a = param_rule(p)) // param && - (c = default_rule(p), 1) + (c = default_rule(p), 1) // default? && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? ) { res = _PyPegen_name_default_pair ( p , a , c , tc ); @@ -4360,13 +4360,13 @@ param_maybe_default_rule(Parser *p) void *c; void *tc; if ( - (a = param_rule(p)) + (a = param_rule(p)) // param && - (c = default_rule(p), 1) + (c = default_rule(p), 1) // default? 
&& - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { res = _PyPegen_name_default_pair ( p , a , c , tc ); @@ -4404,9 +4404,9 @@ param_rule(Parser *p) expr_ty a; void *b; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME && - (b = annotation_rule(p), 1) + (b = annotation_rule(p), 1) // annotation? ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -4444,9 +4444,9 @@ annotation_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression ) { res = a; @@ -4476,9 +4476,9 @@ default_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 22)) + (literal = _PyPegen_expect_token(p, 22)) // token='=' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression ) { res = a; @@ -4507,7 +4507,7 @@ decorators_rule(Parser *p) { // (('@' named_expression NEWLINE))+ asdl_seq * a; if ( - (a = _loop1_67_rule(p)) + (a = _loop1_67_rule(p)) // (('@' named_expression NEWLINE))+ ) { res = a; @@ -4537,9 +4537,9 @@ class_def_rule(Parser *p) asdl_seq* a; stmt_ty b; if ( - (a = decorators_rule(p)) + (a = decorators_rule(p)) // decorators && - (b = class_def_raw_rule(p)) + (b = class_def_raw_rule(p)) // class_def_raw ) { res = _PyPegen_class_def_decorators ( p , a , b ); @@ -4554,7 +4554,7 @@ class_def_rule(Parser *p) { // class_def_raw stmt_ty class_def_raw_var; if ( - (class_def_raw_var = class_def_raw_rule(p)) + (class_def_raw_var = class_def_raw_rule(p)) // class_def_raw ) { res = class_def_raw_var; @@ -4591,15 +4591,15 @@ class_def_raw_rule(Parser *p) Token * keyword; Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 523)) + (keyword 
= _PyPegen_expect_token(p, 523)) // token='class' && - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME && - (b = _tmp_68_rule(p), 1) + (b = _tmp_68_rule(p), 1) // ['(' arguments? ')'] && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (c = block_rule(p)) + (c = block_rule(p)) // block ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -4641,13 +4641,13 @@ block_rule(Parser *p) Token * indent_var; Token * newline_var; if ( - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' && - (indent_var = _PyPegen_expect_token(p, INDENT)) + (indent_var = _PyPegen_expect_token(p, INDENT)) // token='INDENT' && - (a = statements_rule(p)) + (a = statements_rule(p)) // statements && - (dedent_var = _PyPegen_expect_token(p, DEDENT)) + (dedent_var = _PyPegen_expect_token(p, DEDENT)) // token='DEDENT' ) { res = a; @@ -4662,7 +4662,7 @@ block_rule(Parser *p) { // simple_stmt asdl_seq* simple_stmt_var; if ( - (simple_stmt_var = simple_stmt_rule(p)) + (simple_stmt_var = simple_stmt_rule(p)) // simple_stmt ) { res = simple_stmt_var; @@ -4673,7 +4673,7 @@ block_rule(Parser *p) { // invalid_block void *invalid_block_var; if ( - (invalid_block_var = invalid_block_rule(p)) + (invalid_block_var = invalid_block_rule(p)) // invalid_block ) { res = invalid_block_var; @@ -4701,9 +4701,9 @@ expressions_list_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (a = _gather_69_rule(p)) + (a = _gather_69_rule(p)) // ','.star_expression+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) { res = a; @@ -4746,11 +4746,11 @@ star_expressions_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (a = star_expression_rule(p)) + (a = star_expression_rule(p)) // star_expression && - (b = _loop1_71_rule(p)) + (b = _loop1_71_rule(p)) // ((',' star_expression))+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -4774,9 +4774,9 @@ star_expressions_rule(Parser *p) expr_ty a; Token * literal; if ( - (a = star_expression_rule(p)) + (a = star_expression_rule(p)) // star_expression && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -4799,7 +4799,7 @@ star_expressions_rule(Parser *p) { // star_expression expr_ty star_expression_var; if ( - (star_expression_var = star_expression_rule(p)) + (star_expression_var = star_expression_rule(p)) // star_expression ) { res = star_expression_var; @@ -4835,9 +4835,9 @@ star_expression_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) + (literal = _PyPegen_expect_token(p, 16)) // token='*' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -4860,7 +4860,7 @@ star_expression_rule(Parser *p) { // expression expr_ty expression_var; if ( - (expression_var = expression_rule(p)) + (expression_var = expression_rule(p)) // expression ) { res = expression_var; @@ -4888,9 +4888,9 @@ star_named_expressions_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (a = _gather_72_rule(p)) + (a = _gather_72_rule(p)) // ','.star_named_expression+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) { res = a; @@ -4928,9 +4928,9 @@ star_named_expression_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) + (literal = _PyPegen_expect_token(p, 16)) // token='*' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -4953,7 +4953,7 @@ star_named_expression_rule(Parser *p) { // named_expression expr_ty named_expression_var; if ( - (named_expression_var = named_expression_rule(p)) + (named_expression_var = named_expression_rule(p)) // named_expression ) { res = named_expression_var; @@ -4988,11 +4988,11 @@ named_expression_rule(Parser *p) expr_ty b; Token * literal; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME && - (literal = _PyPegen_expect_token(p, 53)) + (literal = _PyPegen_expect_token(p, 53)) // token=':=' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -5015,9 +5015,9 @@ named_expression_rule(Parser *p) { // expression !':=' expr_ty expression_var; if ( - (expression_var = expression_rule(p)) + (expression_var = expression_rule(p)) // expression && - _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 53) + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 53) // token=':=' ) { res = expression_var; @@ -5028,7 +5028,7 @@ named_expression_rule(Parser *p) { // invalid_named_expression void *invalid_named_expression_var; if ( - (invalid_named_expression_var = invalid_named_expression_rule(p)) + (invalid_named_expression_var = invalid_named_expression_rule(p)) // invalid_named_expression ) { res = invalid_named_expression_var; @@ -5053,7 +5053,7 @@ annotated_rhs_rule(Parser *p) { // yield_expr expr_ty yield_expr_var; if ( - (yield_expr_var = yield_expr_rule(p)) + (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { res = yield_expr_var; @@ -5064,7 +5064,7 @@ annotated_rhs_rule(Parser *p) { // star_expressions 
expr_ty star_expressions_var; if ( - (star_expressions_var = star_expressions_rule(p)) + (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { res = star_expressions_var; @@ -5100,11 +5100,11 @@ expressions_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (a = expression_rule(p)) + (a = expression_rule(p)) // expression && - (b = _loop1_74_rule(p)) + (b = _loop1_74_rule(p)) // ((',' expression))+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -5128,9 +5128,9 @@ expressions_rule(Parser *p) expr_ty a; Token * literal; if ( - (a = expression_rule(p)) + (a = expression_rule(p)) // expression && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -5153,7 +5153,7 @@ expressions_rule(Parser *p) { // expression expr_ty expression_var; if ( - (expression_var = expression_rule(p)) + (expression_var = expression_rule(p)) // expression ) { res = expression_var; @@ -5192,15 +5192,15 @@ expression_rule(Parser *p) Token * keyword; Token * keyword_1; if ( - (a = disjunction_rule(p)) + (a = disjunction_rule(p)) // disjunction && - (keyword = _PyPegen_expect_token(p, 510)) + (keyword = _PyPegen_expect_token(p, 510)) // token='if' && - (b = disjunction_rule(p)) + (b = disjunction_rule(p)) // disjunction && - (keyword_1 = _PyPegen_expect_token(p, 516)) + (keyword_1 = _PyPegen_expect_token(p, 516)) // token='else' && - (c = expression_rule(p)) + (c = expression_rule(p)) // expression ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -5223,7 +5223,7 @@ expression_rule(Parser *p) { // disjunction expr_ty disjunction_var; if ( - (disjunction_var = disjunction_rule(p)) + (disjunction_var = disjunction_rule(p)) // disjunction ) { res = disjunction_var; @@ -5234,7 +5234,7 @@ 
expression_rule(Parser *p) { // lambdef expr_ty lambdef_var; if ( - (lambdef_var = lambdef_rule(p)) + (lambdef_var = lambdef_rule(p)) // lambdef ) { res = lambdef_var; @@ -5271,13 +5271,13 @@ lambdef_rule(Parser *p) Token * keyword; Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 524)) + (keyword = _PyPegen_expect_token(p, 524)) // token='lambda' && - (a = lambda_parameters_rule(p), 1) + (a = lambda_parameters_rule(p), 1) // lambda_parameters? && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -5322,13 +5322,13 @@ lambda_parameters_rule(Parser *p) asdl_seq * c; void *d; if ( - (a = lambda_slash_no_default_rule(p)) + (a = lambda_slash_no_default_rule(p)) // lambda_slash_no_default && - (b = _loop0_75_rule(p)) + (b = _loop0_75_rule(p)) // lambda_param_no_default* && - (c = _loop0_76_rule(p)) + (c = _loop0_76_rule(p)) // lambda_param_with_default* && - (d = lambda_star_etc_rule(p), 1) + (d = lambda_star_etc_rule(p), 1) // lambda_star_etc? ) { res = _PyPegen_make_arguments ( p , a , NULL , b , c , d ); @@ -5345,11 +5345,11 @@ lambda_parameters_rule(Parser *p) asdl_seq * b; void *c; if ( - (a = lambda_slash_with_default_rule(p)) + (a = lambda_slash_with_default_rule(p)) // lambda_slash_with_default && - (b = _loop0_77_rule(p)) + (b = _loop0_77_rule(p)) // lambda_param_with_default* && - (c = lambda_star_etc_rule(p), 1) + (c = lambda_star_etc_rule(p), 1) // lambda_star_etc? 
) { res = _PyPegen_make_arguments ( p , NULL , a , NULL , b , c ); @@ -5366,11 +5366,11 @@ lambda_parameters_rule(Parser *p) asdl_seq * b; void *c; if ( - (a = _loop1_78_rule(p)) + (a = _loop1_78_rule(p)) // lambda_param_no_default+ && - (b = _loop0_79_rule(p)) + (b = _loop0_79_rule(p)) // lambda_param_with_default* && - (c = lambda_star_etc_rule(p), 1) + (c = lambda_star_etc_rule(p), 1) // lambda_star_etc? ) { res = _PyPegen_make_arguments ( p , NULL , NULL , a , b , c ); @@ -5386,9 +5386,9 @@ lambda_parameters_rule(Parser *p) asdl_seq * a; void *b; if ( - (a = _loop1_80_rule(p)) + (a = _loop1_80_rule(p)) // lambda_param_with_default+ && - (b = lambda_star_etc_rule(p), 1) + (b = lambda_star_etc_rule(p), 1) // lambda_star_etc? ) { res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , a , b ); @@ -5403,7 +5403,7 @@ lambda_parameters_rule(Parser *p) { // lambda_star_etc StarEtc* a; if ( - (a = lambda_star_etc_rule(p)) + (a = lambda_star_etc_rule(p)) // lambda_star_etc ) { res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , NULL , a ); @@ -5436,11 +5436,11 @@ lambda_slash_no_default_rule(Parser *p) Token * literal; Token * literal_1; if ( - (a = _loop1_81_rule(p)) + (a = _loop1_81_rule(p)) // lambda_param_no_default+ && - (literal = _PyPegen_expect_token(p, 17)) + (literal = _PyPegen_expect_token(p, 17)) // token='/' && - (literal_1 = _PyPegen_expect_token(p, 12)) + (literal_1 = _PyPegen_expect_token(p, 12)) // token=',' ) { res = a; @@ -5456,11 +5456,11 @@ lambda_slash_no_default_rule(Parser *p) asdl_seq * a; Token * literal; if ( - (a = _loop1_82_rule(p)) + (a = _loop1_82_rule(p)) // lambda_param_no_default+ && - (literal = _PyPegen_expect_token(p, 17)) + (literal = _PyPegen_expect_token(p, 17)) // token='/' && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' ) { res = a; @@ -5494,13 +5494,13 @@ lambda_slash_with_default_rule(Parser *p) Token * literal; Token * 
literal_1; if ( - (a = _loop0_83_rule(p)) + (a = _loop0_83_rule(p)) // lambda_param_no_default* && - (b = _loop1_84_rule(p)) + (b = _loop1_84_rule(p)) // lambda_param_with_default+ && - (literal = _PyPegen_expect_token(p, 17)) + (literal = _PyPegen_expect_token(p, 17)) // token='/' && - (literal_1 = _PyPegen_expect_token(p, 12)) + (literal_1 = _PyPegen_expect_token(p, 12)) // token=',' ) { res = _PyPegen_slash_with_default ( p , a , b ); @@ -5517,13 +5517,13 @@ lambda_slash_with_default_rule(Parser *p) asdl_seq * b; Token * literal; if ( - (a = _loop0_85_rule(p)) + (a = _loop0_85_rule(p)) // lambda_param_no_default* && - (b = _loop1_86_rule(p)) + (b = _loop1_86_rule(p)) // lambda_param_with_default+ && - (literal = _PyPegen_expect_token(p, 17)) + (literal = _PyPegen_expect_token(p, 17)) // token='/' && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' ) { res = _PyPegen_slash_with_default ( p , a , b ); @@ -5559,13 +5559,13 @@ lambda_star_etc_rule(Parser *p) void *c; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) + (literal = _PyPegen_expect_token(p, 16)) // token='*' && - (a = lambda_param_no_default_rule(p)) + (a = lambda_param_no_default_rule(p)) // lambda_param_no_default && - (b = _loop0_87_rule(p)) + (b = _loop0_87_rule(p)) // lambda_param_maybe_default* && - (c = lambda_kwds_rule(p), 1) + (c = lambda_kwds_rule(p), 1) // lambda_kwds? ) { res = _PyPegen_star_etc ( p , a , b , c ); @@ -5583,13 +5583,13 @@ lambda_star_etc_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 16)) + (literal = _PyPegen_expect_token(p, 16)) // token='*' && - (literal_1 = _PyPegen_expect_token(p, 12)) + (literal_1 = _PyPegen_expect_token(p, 12)) // token=',' && - (b = _loop1_88_rule(p)) + (b = _loop1_88_rule(p)) // lambda_param_maybe_default+ && - (c = lambda_kwds_rule(p), 1) + (c = lambda_kwds_rule(p), 1) // lambda_kwds? 
) { res = _PyPegen_star_etc ( p , NULL , b , c ); @@ -5604,7 +5604,7 @@ lambda_star_etc_rule(Parser *p) { // lambda_kwds arg_ty a; if ( - (a = lambda_kwds_rule(p)) + (a = lambda_kwds_rule(p)) // lambda_kwds ) { res = _PyPegen_star_etc ( p , NULL , NULL , a ); @@ -5619,7 +5619,7 @@ lambda_star_etc_rule(Parser *p) { // invalid_lambda_star_etc void *invalid_lambda_star_etc_var; if ( - (invalid_lambda_star_etc_var = invalid_lambda_star_etc_rule(p)) + (invalid_lambda_star_etc_var = invalid_lambda_star_etc_rule(p)) // invalid_lambda_star_etc ) { res = invalid_lambda_star_etc_var; @@ -5645,9 +5645,9 @@ lambda_kwds_rule(Parser *p) arg_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 35)) + (literal = _PyPegen_expect_token(p, 35)) // token='**' && - (a = lambda_param_no_default_rule(p)) + (a = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { res = a; @@ -5677,9 +5677,9 @@ lambda_param_no_default_rule(Parser *p) arg_ty a; Token * literal; if ( - (a = lambda_param_rule(p)) + (a = lambda_param_rule(p)) // lambda_param && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' ) { res = a; @@ -5694,9 +5694,9 @@ lambda_param_no_default_rule(Parser *p) { // lambda_param &':' arg_ty a; if ( - (a = lambda_param_rule(p)) + (a = lambda_param_rule(p)) // lambda_param && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' ) { res = a; @@ -5727,11 +5727,11 @@ lambda_param_with_default_rule(Parser *p) expr_ty c; Token * literal; if ( - (a = lambda_param_rule(p)) + (a = lambda_param_rule(p)) // lambda_param && - (c = default_rule(p)) + (c = default_rule(p)) // default && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' ) { res = _PyPegen_name_default_pair ( p , a , c , NULL ); @@ -5747,11 +5747,11 @@ lambda_param_with_default_rule(Parser *p) arg_ty a; expr_ty c; if ( - (a = 
lambda_param_rule(p)) + (a = lambda_param_rule(p)) // lambda_param && - (c = default_rule(p)) + (c = default_rule(p)) // default && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' ) { res = _PyPegen_name_default_pair ( p , a , c , NULL ); @@ -5782,11 +5782,11 @@ lambda_param_maybe_default_rule(Parser *p) void *c; Token * literal; if ( - (a = lambda_param_rule(p)) + (a = lambda_param_rule(p)) // lambda_param && - (c = default_rule(p), 1) + (c = default_rule(p), 1) // default? && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' ) { res = _PyPegen_name_default_pair ( p , a , c , NULL ); @@ -5802,11 +5802,11 @@ lambda_param_maybe_default_rule(Parser *p) arg_ty a; void *c; if ( - (a = lambda_param_rule(p)) + (a = lambda_param_rule(p)) // lambda_param && - (c = default_rule(p), 1) + (c = default_rule(p), 1) // default? && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' ) { res = _PyPegen_name_default_pair ( p , a , c , NULL ); @@ -5843,7 +5843,7 @@ lambda_param_rule(Parser *p) { // NAME expr_ty a; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -5891,9 +5891,9 @@ disjunction_rule(Parser *p) expr_ty a; asdl_seq * b; if ( - (a = conjunction_rule(p)) + (a = conjunction_rule(p)) // conjunction && - (b = _loop1_89_rule(p)) + (b = _loop1_89_rule(p)) // (('or' conjunction))+ ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -5916,7 +5916,7 @@ disjunction_rule(Parser *p) { // conjunction expr_ty conjunction_var; if ( - (conjunction_var = conjunction_rule(p)) + (conjunction_var = conjunction_rule(p)) // conjunction ) { res = conjunction_var; @@ -5953,9 +5953,9 @@ conjunction_rule(Parser *p) expr_ty a; asdl_seq * b; if ( - (a = inversion_rule(p)) + 
(a = inversion_rule(p)) // inversion && - (b = _loop1_90_rule(p)) + (b = _loop1_90_rule(p)) // (('and' inversion))+ ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -5978,7 +5978,7 @@ conjunction_rule(Parser *p) { // inversion expr_ty inversion_var; if ( - (inversion_var = inversion_rule(p)) + (inversion_var = inversion_rule(p)) // inversion ) { res = inversion_var; @@ -6015,9 +6015,9 @@ inversion_rule(Parser *p) expr_ty a; Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 525)) + (keyword = _PyPegen_expect_token(p, 525)) // token='not' && - (a = inversion_rule(p)) + (a = inversion_rule(p)) // inversion ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -6040,7 +6040,7 @@ inversion_rule(Parser *p) { // comparison expr_ty comparison_var; if ( - (comparison_var = comparison_rule(p)) + (comparison_var = comparison_rule(p)) // comparison ) { res = comparison_var; @@ -6075,9 +6075,9 @@ comparison_rule(Parser *p) expr_ty a; asdl_seq * b; if ( - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or && - (b = _loop1_91_rule(p)) + (b = _loop1_91_rule(p)) // compare_op_bitwise_or_pair+ ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -6100,7 +6100,7 @@ comparison_rule(Parser *p) { // bitwise_or expr_ty bitwise_or_var; if ( - (bitwise_or_var = bitwise_or_rule(p)) + (bitwise_or_var = bitwise_or_rule(p)) // bitwise_or ) { res = bitwise_or_var; @@ -6135,7 +6135,7 @@ compare_op_bitwise_or_pair_rule(Parser *p) { // eq_bitwise_or CmpopExprPair* eq_bitwise_or_var; if ( - (eq_bitwise_or_var = eq_bitwise_or_rule(p)) + (eq_bitwise_or_var = eq_bitwise_or_rule(p)) // eq_bitwise_or ) { res = eq_bitwise_or_var; @@ -6146,7 +6146,7 @@ compare_op_bitwise_or_pair_rule(Parser *p) { // noteq_bitwise_or CmpopExprPair* noteq_bitwise_or_var; if ( - (noteq_bitwise_or_var = noteq_bitwise_or_rule(p)) + (noteq_bitwise_or_var = noteq_bitwise_or_rule(p)) // noteq_bitwise_or ) { res = noteq_bitwise_or_var; @@ -6157,7 +6157,7 @@ 
compare_op_bitwise_or_pair_rule(Parser *p) { // lte_bitwise_or CmpopExprPair* lte_bitwise_or_var; if ( - (lte_bitwise_or_var = lte_bitwise_or_rule(p)) + (lte_bitwise_or_var = lte_bitwise_or_rule(p)) // lte_bitwise_or ) { res = lte_bitwise_or_var; @@ -6168,7 +6168,7 @@ compare_op_bitwise_or_pair_rule(Parser *p) { // lt_bitwise_or CmpopExprPair* lt_bitwise_or_var; if ( - (lt_bitwise_or_var = lt_bitwise_or_rule(p)) + (lt_bitwise_or_var = lt_bitwise_or_rule(p)) // lt_bitwise_or ) { res = lt_bitwise_or_var; @@ -6179,7 +6179,7 @@ compare_op_bitwise_or_pair_rule(Parser *p) { // gte_bitwise_or CmpopExprPair* gte_bitwise_or_var; if ( - (gte_bitwise_or_var = gte_bitwise_or_rule(p)) + (gte_bitwise_or_var = gte_bitwise_or_rule(p)) // gte_bitwise_or ) { res = gte_bitwise_or_var; @@ -6190,7 +6190,7 @@ compare_op_bitwise_or_pair_rule(Parser *p) { // gt_bitwise_or CmpopExprPair* gt_bitwise_or_var; if ( - (gt_bitwise_or_var = gt_bitwise_or_rule(p)) + (gt_bitwise_or_var = gt_bitwise_or_rule(p)) // gt_bitwise_or ) { res = gt_bitwise_or_var; @@ -6201,7 +6201,7 @@ compare_op_bitwise_or_pair_rule(Parser *p) { // notin_bitwise_or CmpopExprPair* notin_bitwise_or_var; if ( - (notin_bitwise_or_var = notin_bitwise_or_rule(p)) + (notin_bitwise_or_var = notin_bitwise_or_rule(p)) // notin_bitwise_or ) { res = notin_bitwise_or_var; @@ -6212,7 +6212,7 @@ compare_op_bitwise_or_pair_rule(Parser *p) { // in_bitwise_or CmpopExprPair* in_bitwise_or_var; if ( - (in_bitwise_or_var = in_bitwise_or_rule(p)) + (in_bitwise_or_var = in_bitwise_or_rule(p)) // in_bitwise_or ) { res = in_bitwise_or_var; @@ -6223,7 +6223,7 @@ compare_op_bitwise_or_pair_rule(Parser *p) { // isnot_bitwise_or CmpopExprPair* isnot_bitwise_or_var; if ( - (isnot_bitwise_or_var = isnot_bitwise_or_rule(p)) + (isnot_bitwise_or_var = isnot_bitwise_or_rule(p)) // isnot_bitwise_or ) { res = isnot_bitwise_or_var; @@ -6234,7 +6234,7 @@ compare_op_bitwise_or_pair_rule(Parser *p) { // is_bitwise_or CmpopExprPair* is_bitwise_or_var; if ( - 
(is_bitwise_or_var = is_bitwise_or_rule(p)) + (is_bitwise_or_var = is_bitwise_or_rule(p)) // is_bitwise_or ) { res = is_bitwise_or_var; @@ -6260,9 +6260,9 @@ eq_bitwise_or_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 27)) + (literal = _PyPegen_expect_token(p, 27)) // token='==' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { res = _PyPegen_cmpop_expr_pair ( p , Eq , a ); @@ -6292,9 +6292,9 @@ noteq_bitwise_or_rule(Parser *p) void *_tmp_92_var; expr_ty a; if ( - (_tmp_92_var = _tmp_92_rule(p)) + (_tmp_92_var = _tmp_92_rule(p)) // '!=' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { res = _PyPegen_cmpop_expr_pair ( p , NotEq , a ); @@ -6324,9 +6324,9 @@ lte_bitwise_or_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 29)) + (literal = _PyPegen_expect_token(p, 29)) // token='<=' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { res = _PyPegen_cmpop_expr_pair ( p , LtE , a ); @@ -6356,9 +6356,9 @@ lt_bitwise_or_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 20)) + (literal = _PyPegen_expect_token(p, 20)) // token='<' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { res = _PyPegen_cmpop_expr_pair ( p , Lt , a ); @@ -6388,9 +6388,9 @@ gte_bitwise_or_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 30)) + (literal = _PyPegen_expect_token(p, 30)) // token='>=' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { res = _PyPegen_cmpop_expr_pair ( p , GtE , a ); @@ -6420,9 +6420,9 @@ gt_bitwise_or_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 21)) + (literal = _PyPegen_expect_token(p, 21)) // token='>' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { res = _PyPegen_cmpop_expr_pair ( p , Gt , a ); @@ -6453,11 +6453,11 @@ 
notin_bitwise_or_rule(Parser *p) Token * keyword; Token * keyword_1; if ( - (keyword = _PyPegen_expect_token(p, 525)) + (keyword = _PyPegen_expect_token(p, 525)) // token='not' && - (keyword_1 = _PyPegen_expect_token(p, 518)) + (keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { res = _PyPegen_cmpop_expr_pair ( p , NotIn , a ); @@ -6487,9 +6487,9 @@ in_bitwise_or_rule(Parser *p) expr_ty a; Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 518)) + (keyword = _PyPegen_expect_token(p, 518)) // token='in' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { res = _PyPegen_cmpop_expr_pair ( p , In , a ); @@ -6520,11 +6520,11 @@ isnot_bitwise_or_rule(Parser *p) Token * keyword; Token * keyword_1; if ( - (keyword = _PyPegen_expect_token(p, 526)) + (keyword = _PyPegen_expect_token(p, 526)) // token='is' && - (keyword_1 = _PyPegen_expect_token(p, 525)) + (keyword_1 = _PyPegen_expect_token(p, 525)) // token='not' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { res = _PyPegen_cmpop_expr_pair ( p , IsNot , a ); @@ -6554,9 +6554,9 @@ is_bitwise_or_rule(Parser *p) expr_ty a; Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 526)) + (keyword = _PyPegen_expect_token(p, 526)) // token='is' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { res = _PyPegen_cmpop_expr_pair ( p , Is , a ); @@ -6620,11 +6620,11 @@ bitwise_or_raw(Parser *p) expr_ty b; Token * literal; if ( - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or && - (literal = _PyPegen_expect_token(p, 18)) + (literal = _PyPegen_expect_token(p, 18)) // token='|' && - (b = bitwise_xor_rule(p)) + (b = bitwise_xor_rule(p)) // bitwise_xor ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -6647,7 +6647,7 @@ bitwise_or_raw(Parser *p) { // bitwise_xor expr_ty bitwise_xor_var; if ( - (bitwise_xor_var = bitwise_xor_rule(p)) + 
(bitwise_xor_var = bitwise_xor_rule(p)) // bitwise_xor ) { res = bitwise_xor_var; @@ -6707,11 +6707,11 @@ bitwise_xor_raw(Parser *p) expr_ty b; Token * literal; if ( - (a = bitwise_xor_rule(p)) + (a = bitwise_xor_rule(p)) // bitwise_xor && - (literal = _PyPegen_expect_token(p, 32)) + (literal = _PyPegen_expect_token(p, 32)) // token='^' && - (b = bitwise_and_rule(p)) + (b = bitwise_and_rule(p)) // bitwise_and ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -6734,7 +6734,7 @@ bitwise_xor_raw(Parser *p) { // bitwise_and expr_ty bitwise_and_var; if ( - (bitwise_and_var = bitwise_and_rule(p)) + (bitwise_and_var = bitwise_and_rule(p)) // bitwise_and ) { res = bitwise_and_var; @@ -6794,11 +6794,11 @@ bitwise_and_raw(Parser *p) expr_ty b; Token * literal; if ( - (a = bitwise_and_rule(p)) + (a = bitwise_and_rule(p)) // bitwise_and && - (literal = _PyPegen_expect_token(p, 19)) + (literal = _PyPegen_expect_token(p, 19)) // token='&' && - (b = shift_expr_rule(p)) + (b = shift_expr_rule(p)) // shift_expr ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -6821,7 +6821,7 @@ bitwise_and_raw(Parser *p) { // shift_expr expr_ty shift_expr_var; if ( - (shift_expr_var = shift_expr_rule(p)) + (shift_expr_var = shift_expr_rule(p)) // shift_expr ) { res = shift_expr_var; @@ -6881,11 +6881,11 @@ shift_expr_raw(Parser *p) expr_ty b; Token * literal; if ( - (a = shift_expr_rule(p)) + (a = shift_expr_rule(p)) // shift_expr && - (literal = _PyPegen_expect_token(p, 33)) + (literal = _PyPegen_expect_token(p, 33)) // token='<<' && - (b = sum_rule(p)) + (b = sum_rule(p)) // sum ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -6910,11 +6910,11 @@ shift_expr_raw(Parser *p) expr_ty b; Token * literal; if ( - (a = shift_expr_rule(p)) + (a = shift_expr_rule(p)) // shift_expr && - (literal = _PyPegen_expect_token(p, 34)) + (literal = _PyPegen_expect_token(p, 34)) // token='>>' && - (b = sum_rule(p)) + (b = sum_rule(p)) // sum ) { Token *token = 
_PyPegen_get_last_nonnwhitespace_token(p); @@ -6937,7 +6937,7 @@ shift_expr_raw(Parser *p) { // sum expr_ty sum_var; if ( - (sum_var = sum_rule(p)) + (sum_var = sum_rule(p)) // sum ) { res = sum_var; @@ -6997,11 +6997,11 @@ sum_raw(Parser *p) expr_ty b; Token * literal; if ( - (a = sum_rule(p)) + (a = sum_rule(p)) // sum && - (literal = _PyPegen_expect_token(p, 14)) + (literal = _PyPegen_expect_token(p, 14)) // token='+' && - (b = term_rule(p)) + (b = term_rule(p)) // term ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7026,11 +7026,11 @@ sum_raw(Parser *p) expr_ty b; Token * literal; if ( - (a = sum_rule(p)) + (a = sum_rule(p)) // sum && - (literal = _PyPegen_expect_token(p, 15)) + (literal = _PyPegen_expect_token(p, 15)) // token='-' && - (b = term_rule(p)) + (b = term_rule(p)) // term ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7053,7 +7053,7 @@ sum_raw(Parser *p) { // term expr_ty term_var; if ( - (term_var = term_rule(p)) + (term_var = term_rule(p)) // term ) { res = term_var; @@ -7119,11 +7119,11 @@ term_raw(Parser *p) expr_ty b; Token * literal; if ( - (a = term_rule(p)) + (a = term_rule(p)) // term && - (literal = _PyPegen_expect_token(p, 16)) + (literal = _PyPegen_expect_token(p, 16)) // token='*' && - (b = factor_rule(p)) + (b = factor_rule(p)) // factor ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7148,11 +7148,11 @@ term_raw(Parser *p) expr_ty b; Token * literal; if ( - (a = term_rule(p)) + (a = term_rule(p)) // term && - (literal = _PyPegen_expect_token(p, 17)) + (literal = _PyPegen_expect_token(p, 17)) // token='/' && - (b = factor_rule(p)) + (b = factor_rule(p)) // factor ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7177,11 +7177,11 @@ term_raw(Parser *p) expr_ty b; Token * literal; if ( - (a = term_rule(p)) + (a = term_rule(p)) // term && - (literal = _PyPegen_expect_token(p, 47)) + (literal = _PyPegen_expect_token(p, 47)) // token='//' && - (b = factor_rule(p)) + (b 
= factor_rule(p)) // factor ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7206,11 +7206,11 @@ term_raw(Parser *p) expr_ty b; Token * literal; if ( - (a = term_rule(p)) + (a = term_rule(p)) // term && - (literal = _PyPegen_expect_token(p, 24)) + (literal = _PyPegen_expect_token(p, 24)) // token='%' && - (b = factor_rule(p)) + (b = factor_rule(p)) // factor ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7235,11 +7235,11 @@ term_raw(Parser *p) expr_ty b; Token * literal; if ( - (a = term_rule(p)) + (a = term_rule(p)) // term && - (literal = _PyPegen_expect_token(p, 49)) + (literal = _PyPegen_expect_token(p, 49)) // token='@' && - (b = factor_rule(p)) + (b = factor_rule(p)) // factor ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7262,7 +7262,7 @@ term_raw(Parser *p) { // factor expr_ty factor_var; if ( - (factor_var = factor_rule(p)) + (factor_var = factor_rule(p)) // factor ) { res = factor_var; @@ -7298,9 +7298,9 @@ factor_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 14)) + (literal = _PyPegen_expect_token(p, 14)) // token='+' && - (a = factor_rule(p)) + (a = factor_rule(p)) // factor ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7324,9 +7324,9 @@ factor_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 15)) + (literal = _PyPegen_expect_token(p, 15)) // token='-' && - (a = factor_rule(p)) + (a = factor_rule(p)) // factor ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7350,9 +7350,9 @@ factor_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 31)) + (literal = _PyPegen_expect_token(p, 31)) // token='~' && - (a = factor_rule(p)) + (a = factor_rule(p)) // factor ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7375,7 +7375,7 @@ factor_rule(Parser *p) { // power expr_ty power_var; if ( - (power_var = power_rule(p)) + (power_var = power_rule(p)) 
// power ) { res = power_var; @@ -7411,11 +7411,11 @@ power_rule(Parser *p) expr_ty b; Token * literal; if ( - (a = await_primary_rule(p)) + (a = await_primary_rule(p)) // await_primary && - (literal = _PyPegen_expect_token(p, 35)) + (literal = _PyPegen_expect_token(p, 35)) // token='**' && - (b = factor_rule(p)) + (b = factor_rule(p)) // factor ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7438,7 +7438,7 @@ power_rule(Parser *p) { // await_primary expr_ty await_primary_var; if ( - (await_primary_var = await_primary_rule(p)) + (await_primary_var = await_primary_rule(p)) // await_primary ) { res = await_primary_var; @@ -7474,9 +7474,9 @@ await_primary_rule(Parser *p) expr_ty a; Token * await_var; if ( - (await_var = _PyPegen_expect_token(p, AWAIT)) + (await_var = _PyPegen_expect_token(p, AWAIT)) // token='AWAIT' && - (a = primary_rule(p)) + (a = primary_rule(p)) // primary ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7499,7 +7499,7 @@ await_primary_rule(Parser *p) { // primary expr_ty primary_var; if ( - (primary_var = primary_rule(p)) + (primary_var = primary_rule(p)) // primary ) { res = primary_var; @@ -7565,11 +7565,11 @@ primary_raw(Parser *p) expr_ty b; Token * literal; if ( - (a = primary_rule(p)) + (a = primary_rule(p)) // primary && - (literal = _PyPegen_expect_token(p, 23)) + (literal = _PyPegen_expect_token(p, 23)) // token='.' 
&& - (b = _PyPegen_name_token(p)) + (b = _PyPegen_name_token(p)) // NAME ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7593,9 +7593,9 @@ primary_raw(Parser *p) expr_ty a; expr_ty b; if ( - (a = primary_rule(p)) + (a = primary_rule(p)) // primary && - (b = genexp_rule(p)) + (b = genexp_rule(p)) // genexp ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7621,13 +7621,13 @@ primary_raw(Parser *p) Token * literal; Token * literal_1; if ( - (a = primary_rule(p)) + (a = primary_rule(p)) // primary && - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (b = arguments_rule(p), 1) + (b = arguments_rule(p), 1) // arguments? && - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7653,13 +7653,13 @@ primary_raw(Parser *p) Token * literal; Token * literal_1; if ( - (a = primary_rule(p)) + (a = primary_rule(p)) // primary && - (literal = _PyPegen_expect_token(p, 9)) + (literal = _PyPegen_expect_token(p, 9)) // token='[' && - (b = slices_rule(p)) + (b = slices_rule(p)) // slices && - (literal_1 = _PyPegen_expect_token(p, 10)) + (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7682,7 +7682,7 @@ primary_raw(Parser *p) { // atom expr_ty atom_var; if ( - (atom_var = atom_rule(p)) + (atom_var = atom_rule(p)) // atom ) { res = atom_var; @@ -7715,9 +7715,9 @@ slices_rule(Parser *p) { // slice !',' expr_ty a; if ( - (a = slice_rule(p)) + (a = slice_rule(p)) // slice && - _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 12) + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 12) // token=',' ) { res = a; @@ -7734,9 +7734,9 @@ slices_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (a = _gather_93_rule(p)) + (a = _gather_93_rule(p)) // ','.slice+ && - (opt_var = 
_PyPegen_expect_token(p, 12), 1) + (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7784,13 +7784,13 @@ slice_rule(Parser *p) void *c; Token * literal; if ( - (a = expression_rule(p), 1) + (a = expression_rule(p), 1) // expression? && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = expression_rule(p), 1) + (b = expression_rule(p), 1) // expression? && - (c = _tmp_95_rule(p), 1) + (c = _tmp_95_rule(p), 1) // [':' expression?] ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7813,7 +7813,7 @@ slice_rule(Parser *p) { // expression expr_ty a; if ( - (a = expression_rule(p)) + (a = expression_rule(p)) // expression ) { res = a; @@ -7861,7 +7861,7 @@ atom_rule(Parser *p) { // NAME expr_ty name_var; if ( - (name_var = _PyPegen_name_token(p)) + (name_var = _PyPegen_name_token(p)) // NAME ) { res = name_var; @@ -7872,7 +7872,7 @@ atom_rule(Parser *p) { // 'True' Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 527)) + (keyword = _PyPegen_expect_token(p, 527)) // token='True' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7895,7 +7895,7 @@ atom_rule(Parser *p) { // 'False' Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 528)) + (keyword = _PyPegen_expect_token(p, 528)) // token='False' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7918,7 +7918,7 @@ atom_rule(Parser *p) { // 'None' Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 529)) + (keyword = _PyPegen_expect_token(p, 529)) // token='None' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7941,7 +7941,7 @@ atom_rule(Parser *p) { // '__new_parser__' Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 530)) + (keyword = _PyPegen_expect_token(p, 530)) // token='__new_parser__' ) { res = RAISE_SYNTAX_ERROR ( "You found it!" 
); @@ -7958,7 +7958,7 @@ atom_rule(Parser *p) if ( _PyPegen_lookahead(1, _PyPegen_string_token, p) && - (strings_var = strings_rule(p)) + (strings_var = strings_rule(p)) // strings ) { res = strings_var; @@ -7969,7 +7969,7 @@ atom_rule(Parser *p) { // NUMBER expr_ty number_var; if ( - (number_var = _PyPegen_number_token(p)) + (number_var = _PyPegen_number_token(p)) // NUMBER ) { res = number_var; @@ -7980,9 +7980,9 @@ atom_rule(Parser *p) { // &'(' (tuple | group | genexp) void *_tmp_96_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 7) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 7) // token='(' && - (_tmp_96_var = _tmp_96_rule(p)) + (_tmp_96_var = _tmp_96_rule(p)) // tuple | group | genexp ) { res = _tmp_96_var; @@ -7993,9 +7993,9 @@ atom_rule(Parser *p) { // &'[' (list | listcomp) void *_tmp_97_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 9) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 9) // token='[' && - (_tmp_97_var = _tmp_97_rule(p)) + (_tmp_97_var = _tmp_97_rule(p)) // list | listcomp ) { res = _tmp_97_var; @@ -8006,9 +8006,9 @@ atom_rule(Parser *p) { // &'{' (dict | set | dictcomp | setcomp) void *_tmp_98_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 25) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 25) // token='{' && - (_tmp_98_var = _tmp_98_rule(p)) + (_tmp_98_var = _tmp_98_rule(p)) // dict | set | dictcomp | setcomp ) { res = _tmp_98_var; @@ -8019,7 +8019,7 @@ atom_rule(Parser *p) { // '...' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 52)) + (literal = _PyPegen_expect_token(p, 52)) // token='...' 
) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -8058,7 +8058,7 @@ strings_rule(Parser *p) { // STRING+ asdl_seq * a; if ( - (a = _loop1_99_rule(p)) + (a = _loop1_99_rule(p)) // STRING+ ) { res = _PyPegen_concatenate_strings ( p , a ); @@ -8098,11 +8098,11 @@ list_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 9)) + (literal = _PyPegen_expect_token(p, 9)) // token='[' && - (a = star_named_expressions_rule(p), 1) + (a = star_named_expressions_rule(p), 1) // star_named_expressions? && - (literal_1 = _PyPegen_expect_token(p, 10)) + (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -8150,13 +8150,13 @@ listcomp_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 9)) + (literal = _PyPegen_expect_token(p, 9)) // token='[' && - (a = named_expression_rule(p)) + (a = named_expression_rule(p)) // named_expression && - (b = for_if_clauses_rule(p)) + (b = for_if_clauses_rule(p)) // for_if_clauses && - (literal_1 = _PyPegen_expect_token(p, 10)) + (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -8179,7 +8179,7 @@ listcomp_rule(Parser *p) { // invalid_comprehension void *invalid_comprehension_var; if ( - (invalid_comprehension_var = invalid_comprehension_rule(p)) + (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension ) { res = invalid_comprehension_var; @@ -8214,11 +8214,11 @@ tuple_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = _tmp_100_rule(p), 1) + (a = _tmp_100_rule(p), 1) // [star_named_expression ',' star_named_expressions?] 
&& - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -8257,11 +8257,11 @@ group_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = _tmp_101_rule(p)) + (a = _tmp_101_rule(p)) // yield_expr | named_expression && - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { res = a; @@ -8301,13 +8301,13 @@ genexp_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression && - (b = for_if_clauses_rule(p)) + (b = for_if_clauses_rule(p)) // for_if_clauses && - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -8330,7 +8330,7 @@ genexp_rule(Parser *p) { // invalid_comprehension void *invalid_comprehension_var; if ( - (invalid_comprehension_var = invalid_comprehension_rule(p)) + (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension ) { res = invalid_comprehension_var; @@ -8365,11 +8365,11 @@ set_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 25)) + (literal = _PyPegen_expect_token(p, 25)) // token='{' && - (a = expressions_list_rule(p)) + (a = expressions_list_rule(p)) // expressions_list && - (literal_1 = _PyPegen_expect_token(p, 26)) + (literal_1 = _PyPegen_expect_token(p, 26)) // token='}' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -8417,13 +8417,13 @@ setcomp_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 25)) + (literal = _PyPegen_expect_token(p, 25)) // token='{' && - (a = 
expression_rule(p)) + (a = expression_rule(p)) // expression && - (b = for_if_clauses_rule(p)) + (b = for_if_clauses_rule(p)) // for_if_clauses && - (literal_1 = _PyPegen_expect_token(p, 26)) + (literal_1 = _PyPegen_expect_token(p, 26)) // token='}' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -8446,7 +8446,7 @@ setcomp_rule(Parser *p) { // invalid_comprehension void *invalid_comprehension_var; if ( - (invalid_comprehension_var = invalid_comprehension_rule(p)) + (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension ) { res = invalid_comprehension_var; @@ -8481,11 +8481,11 @@ dict_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 25)) + (literal = _PyPegen_expect_token(p, 25)) // token='{' && - (a = kvpairs_rule(p), 1) + (a = kvpairs_rule(p), 1) // kvpairs? && - (literal_1 = _PyPegen_expect_token(p, 26)) + (literal_1 = _PyPegen_expect_token(p, 26)) // token='}' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -8533,13 +8533,13 @@ dictcomp_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 25)) + (literal = _PyPegen_expect_token(p, 25)) // token='{' && - (a = kvpair_rule(p)) + (a = kvpair_rule(p)) // kvpair && - (b = for_if_clauses_rule(p)) + (b = for_if_clauses_rule(p)) // for_if_clauses && - (literal_1 = _PyPegen_expect_token(p, 26)) + (literal_1 = _PyPegen_expect_token(p, 26)) // token='}' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -8578,9 +8578,9 @@ kvpairs_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (a = _gather_102_rule(p)) + (a = _gather_102_rule(p)) // ','.kvpair+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) { res = a; @@ -8610,9 +8610,9 @@ kvpair_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 35)) + (literal = _PyPegen_expect_token(p, 35)) // token='**' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { res = _PyPegen_key_value_pair ( p , NULL , a ); @@ -8629,11 +8629,11 @@ kvpair_rule(Parser *p) expr_ty b; Token * literal; if ( - (a = expression_rule(p)) + (a = expression_rule(p)) // expression && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression ) { res = _PyPegen_key_value_pair ( p , a , b ); @@ -8662,7 +8662,7 @@ for_if_clauses_rule(Parser *p) { // for_if_clause+ asdl_seq * _loop1_104_var; if ( - (_loop1_104_var = _loop1_104_rule(p)) + (_loop1_104_var = _loop1_104_rule(p)) // for_if_clause+ ) { res = _loop1_104_var; @@ -8694,17 +8694,17 @@ for_if_clause_rule(Parser *p) Token * keyword; Token * keyword_1; if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (keyword = _PyPegen_expect_token(p, 517)) + (keyword = _PyPegen_expect_token(p, 517)) // token='for' && - (a = star_targets_rule(p)) + (a = star_targets_rule(p)) // star_targets && - (keyword_1 = _PyPegen_expect_token(p, 518)) + (keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' && - (b = disjunction_rule(p)) + (b = disjunction_rule(p)) // disjunction && - (c = _loop0_105_rule(p)) + (c = _loop0_105_rule(p)) // (('if' disjunction))* ) { res = CHECK_VERSION ( 6 , "Async comprehensions are" , _Py_comprehension ( a , b , c , 1 , p -> arena ) ); @@ -8723,15 +8723,15 @@ for_if_clause_rule(Parser *p) Token * keyword; Token * keyword_1; if ( - (keyword = _PyPegen_expect_token(p, 517)) + (keyword = _PyPegen_expect_token(p, 517)) // token='for' && - (a = star_targets_rule(p)) + (a = star_targets_rule(p)) // star_targets && - (keyword_1 = _PyPegen_expect_token(p, 
518)) + (keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' && - (b = disjunction_rule(p)) + (b = disjunction_rule(p)) // disjunction && - (c = _loop0_106_rule(p)) + (c = _loop0_106_rule(p)) // (('if' disjunction))* ) { res = _Py_comprehension ( a , b , c , 0 , p -> arena ); @@ -8770,11 +8770,11 @@ yield_expr_rule(Parser *p) Token * keyword; Token * keyword_1; if ( - (keyword = _PyPegen_expect_token(p, 504)) + (keyword = _PyPegen_expect_token(p, 504)) // token='yield' && - (keyword_1 = _PyPegen_expect_token(p, 514)) + (keyword_1 = _PyPegen_expect_token(p, 514)) // token='from' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -8798,9 +8798,9 @@ yield_expr_rule(Parser *p) void *a; Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 504)) + (keyword = _PyPegen_expect_token(p, 504)) // token='yield' && - (a = star_expressions_rule(p), 1) + (a = star_expressions_rule(p), 1) // star_expressions? ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -8841,11 +8841,11 @@ arguments_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (a = args_rule(p)) + (a = args_rule(p)) // args && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
&& - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { res = a; @@ -8860,7 +8860,7 @@ arguments_rule(Parser *p) { // incorrect_arguments void *incorrect_arguments_var; if ( - (incorrect_arguments_var = incorrect_arguments_rule(p)) + (incorrect_arguments_var = incorrect_arguments_rule(p)) // incorrect_arguments ) { res = incorrect_arguments_var; @@ -8895,9 +8895,9 @@ args_rule(Parser *p) expr_ty a; void *b; if ( - (a = starred_expression_rule(p)) + (a = starred_expression_rule(p)) // starred_expression && - (b = _tmp_107_rule(p), 1) + (b = _tmp_107_rule(p), 1) // [',' args] ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -8920,7 +8920,7 @@ args_rule(Parser *p) { // kwargs asdl_seq* a; if ( - (a = kwargs_rule(p)) + (a = kwargs_rule(p)) // kwargs ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -8944,9 +8944,9 @@ args_rule(Parser *p) expr_ty a; void *b; if ( - (a = named_expression_rule(p)) + (a = named_expression_rule(p)) // named_expression && - (b = _tmp_108_rule(p), 1) + (b = _tmp_108_rule(p), 1) // [',' args] ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -8988,11 +8988,11 @@ kwargs_rule(Parser *p) asdl_seq * b; Token * literal; if ( - (a = _gather_109_rule(p)) + (a = _gather_109_rule(p)) // ','.kwarg_or_starred+ && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (b = _gather_111_rule(p)) + (b = _gather_111_rule(p)) // ','.kwarg_or_double_starred+ ) { res = _PyPegen_join_sequences ( p , a , b ); @@ -9007,7 +9007,7 @@ kwargs_rule(Parser *p) { // ','.kwarg_or_starred+ asdl_seq * _gather_113_var; if ( - (_gather_113_var = _gather_113_rule(p)) + (_gather_113_var = _gather_113_rule(p)) // ','.kwarg_or_starred+ ) { res = _gather_113_var; @@ -9018,7 +9018,7 @@ kwargs_rule(Parser *p) { // ','.kwarg_or_double_starred+ asdl_seq * _gather_115_var; if ( - (_gather_115_var = 
_gather_115_rule(p)) + (_gather_115_var = _gather_115_rule(p)) // ','.kwarg_or_double_starred+ ) { res = _gather_115_var; @@ -9052,9 +9052,9 @@ starred_expression_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) + (literal = _PyPegen_expect_token(p, 16)) // token='*' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -9101,11 +9101,11 @@ kwarg_or_starred_rule(Parser *p) expr_ty b; Token * literal; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME && - (literal = _PyPegen_expect_token(p, 22)) + (literal = _PyPegen_expect_token(p, 22)) // token='=' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -9128,7 +9128,7 @@ kwarg_or_starred_rule(Parser *p) { // starred_expression expr_ty a; if ( - (a = starred_expression_rule(p)) + (a = starred_expression_rule(p)) // starred_expression ) { res = _PyPegen_keyword_or_starred ( p , a , 0 ); @@ -9167,11 +9167,11 @@ kwarg_or_double_starred_rule(Parser *p) expr_ty b; Token * literal; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME && - (literal = _PyPegen_expect_token(p, 22)) + (literal = _PyPegen_expect_token(p, 22)) // token='=' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -9195,9 +9195,9 @@ kwarg_or_double_starred_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 35)) + (literal = _PyPegen_expect_token(p, 35)) // token='**' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -9242,9 +9242,9 @@ star_targets_rule(Parser *p) { // star_target !',' expr_ty a; if ( - (a = star_target_rule(p)) + (a = star_target_rule(p)) // star_target && - 
_PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 12) + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 12) // token=',' ) { res = a; @@ -9262,11 +9262,11 @@ star_targets_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (a = star_target_rule(p)) + (a = star_target_rule(p)) // star_target && - (b = _loop0_117_rule(p)) + (b = _loop0_117_rule(p)) // ((',' star_target))* && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -9305,9 +9305,9 @@ star_targets_seq_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (a = _gather_118_rule(p)) + (a = _gather_118_rule(p)) // ','.star_target+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) { res = a; @@ -9351,9 +9351,9 @@ star_target_rule(Parser *p) void *a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) + (literal = _PyPegen_expect_token(p, 16)) // token='*' && - (a = _tmp_120_rule(p)) + (a = _tmp_120_rule(p)) // !'*' star_target ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -9378,11 +9378,11 @@ star_target_rule(Parser *p) expr_ty b; Token * literal; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 23)) + (literal = _PyPegen_expect_token(p, 23)) // token='.' 
&& - (b = _PyPegen_name_token(p)) + (b = _PyPegen_name_token(p)) // NAME && _PyPegen_lookahead(0, t_lookahead_rule, p) ) @@ -9410,13 +9410,13 @@ star_target_rule(Parser *p) Token * literal; Token * literal_1; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 9)) + (literal = _PyPegen_expect_token(p, 9)) // token='[' && - (b = slices_rule(p)) + (b = slices_rule(p)) // slices && - (literal_1 = _PyPegen_expect_token(p, 10)) + (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' && _PyPegen_lookahead(0, t_lookahead_rule, p) ) @@ -9441,7 +9441,7 @@ star_target_rule(Parser *p) { // star_atom expr_ty star_atom_var; if ( - (star_atom_var = star_atom_rule(p)) + (star_atom_var = star_atom_rule(p)) // star_atom ) { res = star_atom_var; @@ -9479,7 +9479,7 @@ star_atom_rule(Parser *p) { // NAME expr_ty a; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME ) { res = _PyPegen_set_expr_context ( p , a , Store ); @@ -9496,11 +9496,11 @@ star_atom_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = star_target_rule(p)) + (a = star_target_rule(p)) // star_target && - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { res = _PyPegen_set_expr_context ( p , a , Store ); @@ -9517,11 +9517,11 @@ star_atom_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = star_targets_seq_rule(p), 1) + (a = star_targets_seq_rule(p), 1) // star_targets_seq? 
&& - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -9546,11 +9546,11 @@ star_atom_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 9)) + (literal = _PyPegen_expect_token(p, 9)) // token='[' && - (a = star_targets_seq_rule(p), 1) + (a = star_targets_seq_rule(p), 1) // star_targets_seq? && - (literal_1 = _PyPegen_expect_token(p, 10)) + (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -9590,7 +9590,7 @@ inside_paren_ann_assign_target_rule(Parser *p) { // ann_assign_subscript_attribute_target expr_ty ann_assign_subscript_attribute_target_var; if ( - (ann_assign_subscript_attribute_target_var = ann_assign_subscript_attribute_target_rule(p)) + (ann_assign_subscript_attribute_target_var = ann_assign_subscript_attribute_target_rule(p)) // ann_assign_subscript_attribute_target ) { res = ann_assign_subscript_attribute_target_var; @@ -9601,7 +9601,7 @@ inside_paren_ann_assign_target_rule(Parser *p) { // NAME expr_ty a; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME ) { res = _PyPegen_set_expr_context ( p , a , Store ); @@ -9618,11 +9618,11 @@ inside_paren_ann_assign_target_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = inside_paren_ann_assign_target_rule(p)) + (a = inside_paren_ann_assign_target_rule(p)) // inside_paren_ann_assign_target && - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { res = a; @@ -9663,11 +9663,11 @@ ann_assign_subscript_attribute_target_rule(Parser *p) expr_ty b; Token * literal; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 23)) + (literal = 
_PyPegen_expect_token(p, 23)) // token='.' && - (b = _PyPegen_name_token(p)) + (b = _PyPegen_name_token(p)) // NAME && _PyPegen_lookahead(0, t_lookahead_rule, p) ) @@ -9695,13 +9695,13 @@ ann_assign_subscript_attribute_target_rule(Parser *p) Token * literal; Token * literal_1; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 9)) + (literal = _PyPegen_expect_token(p, 9)) // token='[' && - (b = slices_rule(p)) + (b = slices_rule(p)) // slices && - (literal_1 = _PyPegen_expect_token(p, 10)) + (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' && _PyPegen_lookahead(0, t_lookahead_rule, p) ) @@ -9742,9 +9742,9 @@ del_targets_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (a = _gather_121_rule(p)) + (a = _gather_121_rule(p)) // ','.del_target+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) { res = a; @@ -9788,11 +9788,11 @@ del_target_rule(Parser *p) expr_ty b; Token * literal; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 23)) + (literal = _PyPegen_expect_token(p, 23)) // token='.' 
&& - (b = _PyPegen_name_token(p)) + (b = _PyPegen_name_token(p)) // NAME && _PyPegen_lookahead(0, t_lookahead_rule, p) ) @@ -9820,13 +9820,13 @@ del_target_rule(Parser *p) Token * literal; Token * literal_1; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 9)) + (literal = _PyPegen_expect_token(p, 9)) // token='[' && - (b = slices_rule(p)) + (b = slices_rule(p)) // slices && - (literal_1 = _PyPegen_expect_token(p, 10)) + (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' && _PyPegen_lookahead(0, t_lookahead_rule, p) ) @@ -9851,7 +9851,7 @@ del_target_rule(Parser *p) { // del_t_atom expr_ty del_t_atom_var; if ( - (del_t_atom_var = del_t_atom_rule(p)) + (del_t_atom_var = del_t_atom_rule(p)) // del_t_atom ) { res = del_t_atom_var; @@ -9885,7 +9885,7 @@ del_t_atom_rule(Parser *p) { // NAME expr_ty a; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME ) { res = _PyPegen_set_expr_context ( p , a , Del ); @@ -9902,11 +9902,11 @@ del_t_atom_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = del_target_rule(p)) + (a = del_target_rule(p)) // del_target && - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { res = _PyPegen_set_expr_context ( p , a , Del ); @@ -9923,11 +9923,11 @@ del_t_atom_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = del_targets_rule(p), 1) + (a = del_targets_rule(p), 1) // del_targets? 
&& - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -9952,11 +9952,11 @@ del_t_atom_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 9)) + (literal = _PyPegen_expect_token(p, 9)) // token='[' && - (a = del_targets_rule(p), 1) + (a = del_targets_rule(p), 1) // del_targets? && - (literal_1 = _PyPegen_expect_token(p, 10)) + (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -9995,9 +9995,9 @@ targets_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (a = _gather_123_rule(p)) + (a = _gather_123_rule(p)) // ','.target+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) { res = a; @@ -10041,11 +10041,11 @@ target_rule(Parser *p) expr_ty b; Token * literal; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 23)) + (literal = _PyPegen_expect_token(p, 23)) // token='.' 
&& - (b = _PyPegen_name_token(p)) + (b = _PyPegen_name_token(p)) // NAME && _PyPegen_lookahead(0, t_lookahead_rule, p) ) @@ -10073,13 +10073,13 @@ target_rule(Parser *p) Token * literal; Token * literal_1; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 9)) + (literal = _PyPegen_expect_token(p, 9)) // token='[' && - (b = slices_rule(p)) + (b = slices_rule(p)) // slices && - (literal_1 = _PyPegen_expect_token(p, 10)) + (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' && _PyPegen_lookahead(0, t_lookahead_rule, p) ) @@ -10104,7 +10104,7 @@ target_rule(Parser *p) { // t_atom expr_ty t_atom_var; if ( - (t_atom_var = t_atom_rule(p)) + (t_atom_var = t_atom_rule(p)) // t_atom ) { res = t_atom_var; @@ -10170,11 +10170,11 @@ t_primary_raw(Parser *p) expr_ty b; Token * literal; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 23)) + (literal = _PyPegen_expect_token(p, 23)) // token='.' 
&& - (b = _PyPegen_name_token(p)) + (b = _PyPegen_name_token(p)) // NAME && _PyPegen_lookahead(1, t_lookahead_rule, p) ) @@ -10202,13 +10202,13 @@ t_primary_raw(Parser *p) Token * literal; Token * literal_1; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 9)) + (literal = _PyPegen_expect_token(p, 9)) // token='[' && - (b = slices_rule(p)) + (b = slices_rule(p)) // slices && - (literal_1 = _PyPegen_expect_token(p, 10)) + (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' && _PyPegen_lookahead(1, t_lookahead_rule, p) ) @@ -10234,9 +10234,9 @@ t_primary_raw(Parser *p) expr_ty a; expr_ty b; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (b = genexp_rule(p)) + (b = genexp_rule(p)) // genexp && _PyPegen_lookahead(1, t_lookahead_rule, p) ) @@ -10264,13 +10264,13 @@ t_primary_raw(Parser *p) Token * literal; Token * literal_1; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (b = arguments_rule(p), 1) + (b = arguments_rule(p), 1) // arguments? && - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && _PyPegen_lookahead(1, t_lookahead_rule, p) ) @@ -10295,7 +10295,7 @@ t_primary_raw(Parser *p) { // atom &t_lookahead expr_ty a; if ( - (a = atom_rule(p)) + (a = atom_rule(p)) // atom && _PyPegen_lookahead(1, t_lookahead_rule, p) ) @@ -10326,7 +10326,7 @@ t_lookahead_rule(Parser *p) { // '(' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' ) { res = literal; @@ -10337,7 +10337,7 @@ t_lookahead_rule(Parser *p) { // '[' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 9)) + (literal = _PyPegen_expect_token(p, 9)) // token='[' ) { res = literal; @@ -10348,7 +10348,7 @@ t_lookahead_rule(Parser *p) { // '.' 
Token * literal; if ( - (literal = _PyPegen_expect_token(p, 23)) + (literal = _PyPegen_expect_token(p, 23)) // token='.' ) { res = literal; @@ -10381,7 +10381,7 @@ t_atom_rule(Parser *p) { // NAME expr_ty a; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME ) { res = _PyPegen_set_expr_context ( p , a , Store ); @@ -10398,11 +10398,11 @@ t_atom_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = target_rule(p)) + (a = target_rule(p)) // target && - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { res = _PyPegen_set_expr_context ( p , a , Store ); @@ -10419,11 +10419,11 @@ t_atom_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (b = targets_rule(p), 1) + (b = targets_rule(p), 1) // targets? && - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -10448,11 +10448,11 @@ t_atom_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 9)) + (literal = _PyPegen_expect_token(p, 9)) // token='[' && - (b = targets_rule(p), 1) + (b = targets_rule(p), 1) // targets? 
&& - (literal_1 = _PyPegen_expect_token(p, 10)) + (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -10494,11 +10494,11 @@ incorrect_arguments_rule(Parser *p) Token * literal; Token * literal_1; if ( - (args_var = args_rule(p)) + (args_var = args_rule(p)) // args && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (literal_1 = _PyPegen_expect_token(p, 16)) + (literal_1 = _PyPegen_expect_token(p, 16)) // token='*' ) { res = RAISE_SYNTAX_ERROR ( "iterable argument unpacking follows keyword argument unpacking" ); @@ -10517,13 +10517,13 @@ incorrect_arguments_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (expression_var = expression_rule(p)) + (expression_var = expression_rule(p)) // expression && - (for_if_clauses_var = for_if_clauses_rule(p)) + (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (opt_var = _tmp_125_rule(p), 1) + (opt_var = _tmp_125_rule(p), 1) // [args | expression for_if_clauses] ) { res = RAISE_SYNTAX_ERROR ( "Generator expression must be parenthesized" ); @@ -10540,11 +10540,11 @@ incorrect_arguments_rule(Parser *p) expr_ty args_var; Token * literal; if ( - (a = args_rule(p)) + (a = args_rule(p)) // args && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (args_var = args_rule(p)) + (args_var = args_rule(p)) // args ) { res = _PyPegen_arguments_parsing_error ( p , a ); @@ -10575,11 +10575,11 @@ invalid_named_expression_rule(Parser *p) expr_ty expression_var; Token * literal; if ( - (a = expression_rule(p)) + (a = expression_rule(p)) // expression && - (literal = _PyPegen_expect_token(p, 53)) + (literal = _PyPegen_expect_token(p, 53)) // token=':=' && - (expression_var = expression_rule(p)) + (expression_var = 
expression_rule(p)) // expression ) { res = RAISE_SYNTAX_ERROR ( "cannot use assignment expressions with %s" , _PyPegen_get_expr_name ( a ) ); @@ -10613,9 +10613,9 @@ invalid_assignment_rule(Parser *p) expr_ty list_var; Token * literal; if ( - (list_var = list_rule(p)) + (list_var = list_rule(p)) // list && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' ) { res = RAISE_SYNTAX_ERROR ( "only single target (not list) can be annotated" ); @@ -10631,9 +10631,9 @@ invalid_assignment_rule(Parser *p) Token * literal; expr_ty tuple_var; if ( - (tuple_var = tuple_rule(p)) + (tuple_var = tuple_rule(p)) // tuple && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' ) { res = RAISE_SYNTAX_ERROR ( "only single target (not tuple) can be annotated" ); @@ -10652,13 +10652,13 @@ invalid_assignment_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (expression_var = expression_rule(p)) + (expression_var = expression_rule(p)) // expression && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (expression_var_1 = expression_rule(p)) + (expression_var_1 = expression_rule(p)) // expression && - (opt_var = _tmp_126_rule(p), 1) + (opt_var = _tmp_126_rule(p), 1) // ['=' annotated_rhs] ) { res = RAISE_SYNTAX_ERROR ( "illegal target for annotation" ); @@ -10675,11 +10675,11 @@ invalid_assignment_rule(Parser *p) void *_tmp_128_var; expr_ty a; if ( - (a = expression_rule(p)) + (a = expression_rule(p)) // expression && - (_tmp_127_var = _tmp_127_rule(p)) + (_tmp_127_var = _tmp_127_rule(p)) // '=' | augassign && - (_tmp_128_var = _tmp_128_rule(p)) + (_tmp_128_var = _tmp_128_rule(p)) // yield_expr | star_expressions ) { res = RAISE_SYNTAX_ERROR_NO_COL_OFFSET ( "cannot assign to %s" , _PyPegen_get_expr_name ( a ) ); @@ -10708,9 +10708,9 @@ invalid_block_rule(Parser *p) { // NEWLINE !INDENT Token * newline_var; if ( 
- (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' && - _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, INDENT) + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, INDENT) // token=INDENT ) { res = RAISE_INDENTATION_ERROR ( "expected an indented block" ); @@ -10742,13 +10742,13 @@ invalid_comprehension_rule(Parser *p) asdl_seq* for_if_clauses_var; Token * literal; if ( - (_tmp_129_var = _tmp_129_rule(p)) + (_tmp_129_var = _tmp_129_rule(p)) // '[' | '(' | '{' && - (literal = _PyPegen_expect_token(p, 16)) + (literal = _PyPegen_expect_token(p, 16)) // token='*' && - (expression_var = expression_rule(p)) + (expression_var = expression_rule(p)) // expression && - (for_if_clauses_var = for_if_clauses_rule(p)) + (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses ) { res = RAISE_SYNTAX_ERROR ( "iterable unpacking cannot be used in comprehension" ); @@ -10780,11 +10780,11 @@ invalid_parameters_rule(Parser *p) void *_tmp_131_var; arg_ty param_no_default_var; if ( - (_loop0_130_var = _loop0_130_rule(p)) + (_loop0_130_var = _loop0_130_rule(p)) // param_no_default* && - (_tmp_131_var = _tmp_131_rule(p)) + (_tmp_131_var = _tmp_131_rule(p)) // slash_with_default | param_with_default+ && - (param_no_default_var = param_no_default_rule(p)) + (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { res = RAISE_SYNTAX_ERROR ( "non-default argument follows default argument" ); @@ -10814,9 +10814,9 @@ invalid_star_etc_rule(Parser *p) void *_tmp_132_var; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) + (literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_132_var = _tmp_132_rule(p)) + (_tmp_132_var = _tmp_132_rule(p)) // ')' | ',' (')' | '**') ) { res = RAISE_SYNTAX_ERROR ( "named arguments must follow bare *" ); @@ -10846,9 +10846,9 @@ invalid_lambda_star_etc_rule(Parser *p) void *_tmp_133_var; Token * literal; if ( - (literal = 
_PyPegen_expect_token(p, 16)) + (literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_133_var = _tmp_133_rule(p)) + (_tmp_133_var = _tmp_133_rule(p)) // ':' | ',' (':' | '**') ) { res = RAISE_SYNTAX_ERROR ( "named arguments must follow bare *" ); @@ -10881,15 +10881,15 @@ invalid_double_type_comments_rule(Parser *p) Token * type_comment_var; Token * type_comment_var_1; if ( - (type_comment_var = _PyPegen_expect_token(p, TYPE_COMMENT)) + (type_comment_var = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' && - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' && - (type_comment_var_1 = _PyPegen_expect_token(p, TYPE_COMMENT)) + (type_comment_var_1 = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' && - (newline_var_1 = _PyPegen_expect_token(p, NEWLINE)) + (newline_var_1 = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' && - (indent_var = _PyPegen_expect_token(p, INDENT)) + (indent_var = _PyPegen_expect_token(p, INDENT)) // token='INDENT' ) { res = RAISE_SYNTAX_ERROR ( "Cannot have two type comments on def" ); @@ -10926,7 +10926,7 @@ _loop0_1_rule(Parser *p) { // NEWLINE Token * newline_var; while ( - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { res = newline_var; @@ -10975,7 +10975,7 @@ _loop0_2_rule(Parser *p) { // NEWLINE Token * newline_var; while ( - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { res = newline_var; @@ -11025,9 +11025,9 @@ _loop0_4_rule(Parser *p) expr_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = expression_rule(p)) + (elem = expression_rule(p)) // expression ) { res = elem; @@ -11074,9 +11074,9 @@ _gather_3_rule(Parser *p) expr_ty elem; asdl_seq * seq; if ( - (elem = 
expression_rule(p)) + (elem = expression_rule(p)) // expression && - (seq = _loop0_4_rule(p)) + (seq = _loop0_4_rule(p)) // _loop0_4 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -11110,9 +11110,9 @@ _loop0_6_rule(Parser *p) expr_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = expression_rule(p)) + (elem = expression_rule(p)) // expression ) { res = elem; @@ -11159,9 +11159,9 @@ _gather_5_rule(Parser *p) expr_ty elem; asdl_seq * seq; if ( - (elem = expression_rule(p)) + (elem = expression_rule(p)) // expression && - (seq = _loop0_6_rule(p)) + (seq = _loop0_6_rule(p)) // _loop0_6 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -11195,9 +11195,9 @@ _loop0_8_rule(Parser *p) expr_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = expression_rule(p)) + (elem = expression_rule(p)) // expression ) { res = elem; @@ -11244,9 +11244,9 @@ _gather_7_rule(Parser *p) expr_ty elem; asdl_seq * seq; if ( - (elem = expression_rule(p)) + (elem = expression_rule(p)) // expression && - (seq = _loop0_8_rule(p)) + (seq = _loop0_8_rule(p)) // _loop0_8 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -11280,9 +11280,9 @@ _loop0_10_rule(Parser *p) expr_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = expression_rule(p)) + (elem = expression_rule(p)) // expression ) { res = elem; @@ -11329,9 +11329,9 @@ _gather_9_rule(Parser *p) expr_ty elem; asdl_seq * seq; if ( - (elem = expression_rule(p)) + (elem = expression_rule(p)) // expression && - (seq = _loop0_10_rule(p)) + (seq = _loop0_10_rule(p)) // _loop0_10 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -11364,7 +11364,7 @@ _loop1_11_rule(Parser *p) { // statement asdl_seq* statement_var; while ( - (statement_var = 
statement_rule(p)) + (statement_var = statement_rule(p)) // statement ) { res = statement_var; @@ -11418,9 +11418,9 @@ _loop0_13_rule(Parser *p) stmt_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 13)) + (literal = _PyPegen_expect_token(p, 13)) // token=';' && - (elem = small_stmt_rule(p)) + (elem = small_stmt_rule(p)) // small_stmt ) { res = elem; @@ -11467,9 +11467,9 @@ _gather_12_rule(Parser *p) stmt_ty elem; asdl_seq * seq; if ( - (elem = small_stmt_rule(p)) + (elem = small_stmt_rule(p)) // small_stmt && - (seq = _loop0_13_rule(p)) + (seq = _loop0_13_rule(p)) // _loop0_13 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -11494,7 +11494,7 @@ _tmp_14_rule(Parser *p) { // 'import' Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 513)) + (keyword = _PyPegen_expect_token(p, 513)) // token='import' ) { res = keyword; @@ -11505,7 +11505,7 @@ _tmp_14_rule(Parser *p) { // 'from' Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 514)) + (keyword = _PyPegen_expect_token(p, 514)) // token='from' ) { res = keyword; @@ -11530,7 +11530,7 @@ _tmp_15_rule(Parser *p) { // 'def' Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 522)) + (keyword = _PyPegen_expect_token(p, 522)) // token='def' ) { res = keyword; @@ -11541,7 +11541,7 @@ _tmp_15_rule(Parser *p) { // '@' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 49)) + (literal = _PyPegen_expect_token(p, 49)) // token='@' ) { res = literal; @@ -11552,7 +11552,7 @@ _tmp_15_rule(Parser *p) { // ASYNC Token * async_var; if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' ) { res = async_var; @@ -11577,7 +11577,7 @@ _tmp_16_rule(Parser *p) { // 'class' Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 523)) + (keyword = _PyPegen_expect_token(p, 523)) // token='class' ) { res = keyword; @@ -11588,7 +11588,7 @@ _tmp_16_rule(Parser *p) { // '@' Token * literal; if ( - (literal = 
_PyPegen_expect_token(p, 49)) + (literal = _PyPegen_expect_token(p, 49)) // token='@' ) { res = literal; @@ -11613,7 +11613,7 @@ _tmp_17_rule(Parser *p) { // 'with' Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 519)) + (keyword = _PyPegen_expect_token(p, 519)) // token='with' ) { res = keyword; @@ -11624,7 +11624,7 @@ _tmp_17_rule(Parser *p) { // ASYNC Token * async_var; if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' ) { res = async_var; @@ -11649,7 +11649,7 @@ _tmp_18_rule(Parser *p) { // 'for' Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 517)) + (keyword = _PyPegen_expect_token(p, 517)) // token='for' ) { res = keyword; @@ -11660,7 +11660,7 @@ _tmp_18_rule(Parser *p) { // ASYNC Token * async_var; if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' ) { res = async_var; @@ -11686,9 +11686,9 @@ _tmp_19_rule(Parser *p) expr_ty d; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 22)) + (literal = _PyPegen_expect_token(p, 22)) // token='=' && - (d = annotated_rhs_rule(p)) + (d = annotated_rhs_rule(p)) // annotated_rhs ) { res = d; @@ -11719,11 +11719,11 @@ _tmp_20_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (b = inside_paren_ann_assign_target_rule(p)) + (b = inside_paren_ann_assign_target_rule(p)) // inside_paren_ann_assign_target && - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { res = b; @@ -11738,7 +11738,7 @@ _tmp_20_rule(Parser *p) { // ann_assign_subscript_attribute_target expr_ty ann_assign_subscript_attribute_target_var; if ( - (ann_assign_subscript_attribute_target_var = ann_assign_subscript_attribute_target_rule(p)) + (ann_assign_subscript_attribute_target_var = ann_assign_subscript_attribute_target_rule(p)) // 
ann_assign_subscript_attribute_target ) { res = ann_assign_subscript_attribute_target_var; @@ -11764,9 +11764,9 @@ _tmp_21_rule(Parser *p) expr_ty d; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 22)) + (literal = _PyPegen_expect_token(p, 22)) // token='=' && - (d = annotated_rhs_rule(p)) + (d = annotated_rhs_rule(p)) // annotated_rhs ) { res = d; @@ -11803,7 +11803,7 @@ _loop1_22_rule(Parser *p) { // (star_targets '=') void *_tmp_134_var; while ( - (_tmp_134_var = _tmp_134_rule(p)) + (_tmp_134_var = _tmp_134_rule(p)) // star_targets '=' ) { res = _tmp_134_var; @@ -11848,7 +11848,7 @@ _tmp_23_rule(Parser *p) { // yield_expr expr_ty yield_expr_var; if ( - (yield_expr_var = yield_expr_rule(p)) + (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { res = yield_expr_var; @@ -11859,7 +11859,7 @@ _tmp_23_rule(Parser *p) { // star_expressions expr_ty star_expressions_var; if ( - (star_expressions_var = star_expressions_rule(p)) + (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { res = star_expressions_var; @@ -11884,7 +11884,7 @@ _tmp_24_rule(Parser *p) { // yield_expr expr_ty yield_expr_var; if ( - (yield_expr_var = yield_expr_rule(p)) + (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { res = yield_expr_var; @@ -11895,7 +11895,7 @@ _tmp_24_rule(Parser *p) { // star_expressions expr_ty star_expressions_var; if ( - (star_expressions_var = star_expressions_rule(p)) + (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { res = star_expressions_var; @@ -11929,9 +11929,9 @@ _loop0_26_rule(Parser *p) expr_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _PyPegen_name_token(p)) + (elem = _PyPegen_name_token(p)) // NAME ) { res = elem; @@ -11978,9 +11978,9 @@ _gather_25_rule(Parser *p) expr_ty elem; asdl_seq * seq; if ( - (elem = _PyPegen_name_token(p)) + (elem = _PyPegen_name_token(p)) // NAME && - (seq = 
_loop0_26_rule(p)) + (seq = _loop0_26_rule(p)) // _loop0_26 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -12014,9 +12014,9 @@ _loop0_28_rule(Parser *p) expr_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _PyPegen_name_token(p)) + (elem = _PyPegen_name_token(p)) // NAME ) { res = elem; @@ -12063,9 +12063,9 @@ _gather_27_rule(Parser *p) expr_ty elem; asdl_seq * seq; if ( - (elem = _PyPegen_name_token(p)) + (elem = _PyPegen_name_token(p)) // NAME && - (seq = _loop0_28_rule(p)) + (seq = _loop0_28_rule(p)) // _loop0_28 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -12091,9 +12091,9 @@ _tmp_29_rule(Parser *p) Token * literal; expr_ty z; if ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (z = expression_rule(p)) + (z = expression_rule(p)) // expression ) { res = z; @@ -12130,7 +12130,7 @@ _loop0_30_rule(Parser *p) { // ('.' | '...') void *_tmp_135_var; while ( - (_tmp_135_var = _tmp_135_rule(p)) + (_tmp_135_var = _tmp_135_rule(p)) // '.' | '...' ) { res = _tmp_135_var; @@ -12179,7 +12179,7 @@ _loop1_31_rule(Parser *p) { // ('.' | '...') void *_tmp_136_var; while ( - (_tmp_136_var = _tmp_136_rule(p)) + (_tmp_136_var = _tmp_136_rule(p)) // '.' | '...' 
) { res = _tmp_136_var; @@ -12233,9 +12233,9 @@ _loop0_33_rule(Parser *p) alias_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = import_from_as_name_rule(p)) + (elem = import_from_as_name_rule(p)) // import_from_as_name ) { res = elem; @@ -12282,9 +12282,9 @@ _gather_32_rule(Parser *p) alias_ty elem; asdl_seq * seq; if ( - (elem = import_from_as_name_rule(p)) + (elem = import_from_as_name_rule(p)) // import_from_as_name && - (seq = _loop0_33_rule(p)) + (seq = _loop0_33_rule(p)) // _loop0_33 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -12310,9 +12310,9 @@ _tmp_34_rule(Parser *p) Token * keyword; expr_ty z; if ( - (keyword = _PyPegen_expect_token(p, 531)) + (keyword = _PyPegen_expect_token(p, 531)) // token='as' && - (z = _PyPegen_name_token(p)) + (z = _PyPegen_name_token(p)) // NAME ) { res = z; @@ -12350,9 +12350,9 @@ _loop0_36_rule(Parser *p) alias_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = dotted_as_name_rule(p)) + (elem = dotted_as_name_rule(p)) // dotted_as_name ) { res = elem; @@ -12399,9 +12399,9 @@ _gather_35_rule(Parser *p) alias_ty elem; asdl_seq * seq; if ( - (elem = dotted_as_name_rule(p)) + (elem = dotted_as_name_rule(p)) // dotted_as_name && - (seq = _loop0_36_rule(p)) + (seq = _loop0_36_rule(p)) // _loop0_36 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -12427,9 +12427,9 @@ _tmp_37_rule(Parser *p) Token * keyword; expr_ty z; if ( - (keyword = _PyPegen_expect_token(p, 531)) + (keyword = _PyPegen_expect_token(p, 531)) // token='as' && - (z = _PyPegen_name_token(p)) + (z = _PyPegen_name_token(p)) // NAME ) { res = z; @@ -12467,9 +12467,9 @@ _loop0_39_rule(Parser *p) withitem_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = 
with_item_rule(p)) + (elem = with_item_rule(p)) // with_item ) { res = elem; @@ -12516,9 +12516,9 @@ _gather_38_rule(Parser *p) withitem_ty elem; asdl_seq * seq; if ( - (elem = with_item_rule(p)) + (elem = with_item_rule(p)) // with_item && - (seq = _loop0_39_rule(p)) + (seq = _loop0_39_rule(p)) // _loop0_39 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -12552,9 +12552,9 @@ _loop0_41_rule(Parser *p) withitem_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = with_item_rule(p)) + (elem = with_item_rule(p)) // with_item ) { res = elem; @@ -12601,9 +12601,9 @@ _gather_40_rule(Parser *p) withitem_ty elem; asdl_seq * seq; if ( - (elem = with_item_rule(p)) + (elem = with_item_rule(p)) // with_item && - (seq = _loop0_41_rule(p)) + (seq = _loop0_41_rule(p)) // _loop0_41 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -12637,9 +12637,9 @@ _loop0_43_rule(Parser *p) withitem_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = with_item_rule(p)) + (elem = with_item_rule(p)) // with_item ) { res = elem; @@ -12686,9 +12686,9 @@ _gather_42_rule(Parser *p) withitem_ty elem; asdl_seq * seq; if ( - (elem = with_item_rule(p)) + (elem = with_item_rule(p)) // with_item && - (seq = _loop0_43_rule(p)) + (seq = _loop0_43_rule(p)) // _loop0_43 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -12722,9 +12722,9 @@ _loop0_45_rule(Parser *p) withitem_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = with_item_rule(p)) + (elem = with_item_rule(p)) // with_item ) { res = elem; @@ -12771,9 +12771,9 @@ _gather_44_rule(Parser *p) withitem_ty elem; asdl_seq * seq; if ( - (elem = with_item_rule(p)) + (elem = with_item_rule(p)) // with_item && - (seq = _loop0_45_rule(p)) + (seq = 
_loop0_45_rule(p)) // _loop0_45 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -12799,9 +12799,9 @@ _tmp_46_rule(Parser *p) Token * keyword; expr_ty t; if ( - (keyword = _PyPegen_expect_token(p, 531)) + (keyword = _PyPegen_expect_token(p, 531)) // token='as' && - (t = target_rule(p)) + (t = target_rule(p)) // target ) { res = t; @@ -12838,7 +12838,7 @@ _loop1_47_rule(Parser *p) { // except_block excepthandler_ty except_block_var; while ( - (except_block_var = except_block_rule(p)) + (except_block_var = except_block_rule(p)) // except_block ) { res = except_block_var; @@ -12884,9 +12884,9 @@ _tmp_48_rule(Parser *p) Token * keyword; expr_ty z; if ( - (keyword = _PyPegen_expect_token(p, 531)) + (keyword = _PyPegen_expect_token(p, 531)) // token='as' && - (z = target_rule(p)) + (z = target_rule(p)) // target ) { res = z; @@ -12916,9 +12916,9 @@ _tmp_49_rule(Parser *p) Token * keyword; expr_ty z; if ( - (keyword = _PyPegen_expect_token(p, 514)) + (keyword = _PyPegen_expect_token(p, 514)) // token='from' && - (z = expression_rule(p)) + (z = expression_rule(p)) // expression ) { res = z; @@ -12948,9 +12948,9 @@ _tmp_50_rule(Parser *p) Token * literal; expr_ty z; if ( - (literal = _PyPegen_expect_token(p, 51)) + (literal = _PyPegen_expect_token(p, 51)) // token='->' && - (z = expression_rule(p)) + (z = expression_rule(p)) // expression ) { res = z; @@ -12980,9 +12980,9 @@ _tmp_51_rule(Parser *p) Token * literal; expr_ty z; if ( - (literal = _PyPegen_expect_token(p, 51)) + (literal = _PyPegen_expect_token(p, 51)) // token='->' && - (z = expression_rule(p)) + (z = expression_rule(p)) // expression ) { res = z; @@ -13012,9 +13012,9 @@ _tmp_52_rule(Parser *p) Token * indent_var; Token * newline_var; if ( - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' && - (indent_var = _PyPegen_expect_token(p, INDENT)) + (indent_var = _PyPegen_expect_token(p, INDENT)) // token='INDENT' ) { res = 
_PyPegen_dummy_name(p, newline_var, indent_var); @@ -13047,7 +13047,7 @@ _loop0_53_rule(Parser *p) { // param_no_default arg_ty param_no_default_var; while ( - (param_no_default_var = param_no_default_rule(p)) + (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { res = param_no_default_var; @@ -13096,7 +13096,7 @@ _loop0_54_rule(Parser *p) { // param_with_default NameDefaultPair* param_with_default_var; while ( - (param_with_default_var = param_with_default_rule(p)) + (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { res = param_with_default_var; @@ -13145,7 +13145,7 @@ _loop0_55_rule(Parser *p) { // param_with_default NameDefaultPair* param_with_default_var; while ( - (param_with_default_var = param_with_default_rule(p)) + (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { res = param_with_default_var; @@ -13194,7 +13194,7 @@ _loop1_56_rule(Parser *p) { // param_no_default arg_ty param_no_default_var; while ( - (param_no_default_var = param_no_default_rule(p)) + (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { res = param_no_default_var; @@ -13247,7 +13247,7 @@ _loop0_57_rule(Parser *p) { // param_with_default NameDefaultPair* param_with_default_var; while ( - (param_with_default_var = param_with_default_rule(p)) + (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { res = param_with_default_var; @@ -13296,7 +13296,7 @@ _loop1_58_rule(Parser *p) { // param_with_default NameDefaultPair* param_with_default_var; while ( - (param_with_default_var = param_with_default_rule(p)) + (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { res = param_with_default_var; @@ -13349,7 +13349,7 @@ _loop1_59_rule(Parser *p) { // param_no_default arg_ty param_no_default_var; while ( - (param_no_default_var = param_no_default_rule(p)) + (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { res = 
param_no_default_var; @@ -13402,7 +13402,7 @@ _loop1_60_rule(Parser *p) { // param_no_default arg_ty param_no_default_var; while ( - (param_no_default_var = param_no_default_rule(p)) + (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { res = param_no_default_var; @@ -13455,7 +13455,7 @@ _loop0_61_rule(Parser *p) { // param_no_default arg_ty param_no_default_var; while ( - (param_no_default_var = param_no_default_rule(p)) + (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { res = param_no_default_var; @@ -13504,7 +13504,7 @@ _loop1_62_rule(Parser *p) { // param_with_default NameDefaultPair* param_with_default_var; while ( - (param_with_default_var = param_with_default_rule(p)) + (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { res = param_with_default_var; @@ -13557,7 +13557,7 @@ _loop0_63_rule(Parser *p) { // param_no_default arg_ty param_no_default_var; while ( - (param_no_default_var = param_no_default_rule(p)) + (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { res = param_no_default_var; @@ -13606,7 +13606,7 @@ _loop1_64_rule(Parser *p) { // param_with_default NameDefaultPair* param_with_default_var; while ( - (param_with_default_var = param_with_default_rule(p)) + (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { res = param_with_default_var; @@ -13659,7 +13659,7 @@ _loop0_65_rule(Parser *p) { // param_maybe_default NameDefaultPair* param_maybe_default_var; while ( - (param_maybe_default_var = param_maybe_default_rule(p)) + (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default ) { res = param_maybe_default_var; @@ -13708,7 +13708,7 @@ _loop1_66_rule(Parser *p) { // param_maybe_default NameDefaultPair* param_maybe_default_var; while ( - (param_maybe_default_var = param_maybe_default_rule(p)) + (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default ) { res = param_maybe_default_var; @@ 
-13761,7 +13761,7 @@ _loop1_67_rule(Parser *p) { // ('@' named_expression NEWLINE) void *_tmp_137_var; while ( - (_tmp_137_var = _tmp_137_rule(p)) + (_tmp_137_var = _tmp_137_rule(p)) // '@' named_expression NEWLINE ) { res = _tmp_137_var; @@ -13808,11 +13808,11 @@ _tmp_68_rule(Parser *p) Token * literal_1; void *z; if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (z = arguments_rule(p), 1) + (z = arguments_rule(p), 1) // arguments? && - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { res = z; @@ -13850,9 +13850,9 @@ _loop0_70_rule(Parser *p) expr_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = star_expression_rule(p)) + (elem = star_expression_rule(p)) // star_expression ) { res = elem; @@ -13899,9 +13899,9 @@ _gather_69_rule(Parser *p) expr_ty elem; asdl_seq * seq; if ( - (elem = star_expression_rule(p)) + (elem = star_expression_rule(p)) // star_expression && - (seq = _loop0_70_rule(p)) + (seq = _loop0_70_rule(p)) // _loop0_70 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -13934,7 +13934,7 @@ _loop1_71_rule(Parser *p) { // (',' star_expression) void *_tmp_138_var; while ( - (_tmp_138_var = _tmp_138_rule(p)) + (_tmp_138_var = _tmp_138_rule(p)) // ',' star_expression ) { res = _tmp_138_var; @@ -13988,9 +13988,9 @@ _loop0_73_rule(Parser *p) expr_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = star_named_expression_rule(p)) + (elem = star_named_expression_rule(p)) // star_named_expression ) { res = elem; @@ -14037,9 +14037,9 @@ _gather_72_rule(Parser *p) expr_ty elem; asdl_seq * seq; if ( - (elem = star_named_expression_rule(p)) + (elem = star_named_expression_rule(p)) // star_named_expression && - (seq = _loop0_73_rule(p)) + (seq = _loop0_73_rule(p)) 
// _loop0_73 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -14072,7 +14072,7 @@ _loop1_74_rule(Parser *p) { // (',' expression) void *_tmp_139_var; while ( - (_tmp_139_var = _tmp_139_rule(p)) + (_tmp_139_var = _tmp_139_rule(p)) // ',' expression ) { res = _tmp_139_var; @@ -14125,7 +14125,7 @@ _loop0_75_rule(Parser *p) { // lambda_param_no_default arg_ty lambda_param_no_default_var; while ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { res = lambda_param_no_default_var; @@ -14174,7 +14174,7 @@ _loop0_76_rule(Parser *p) { // lambda_param_with_default NameDefaultPair* lambda_param_with_default_var; while ( - (lambda_param_with_default_var = lambda_param_with_default_rule(p)) + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { res = lambda_param_with_default_var; @@ -14223,7 +14223,7 @@ _loop0_77_rule(Parser *p) { // lambda_param_with_default NameDefaultPair* lambda_param_with_default_var; while ( - (lambda_param_with_default_var = lambda_param_with_default_rule(p)) + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { res = lambda_param_with_default_var; @@ -14272,7 +14272,7 @@ _loop1_78_rule(Parser *p) { // lambda_param_no_default arg_ty lambda_param_no_default_var; while ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { res = lambda_param_no_default_var; @@ -14325,7 +14325,7 @@ _loop0_79_rule(Parser *p) { // lambda_param_with_default NameDefaultPair* lambda_param_with_default_var; while ( - (lambda_param_with_default_var = lambda_param_with_default_rule(p)) + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { res = lambda_param_with_default_var; @@ -14374,7 +14374,7 @@ 
_loop1_80_rule(Parser *p) { // lambda_param_with_default NameDefaultPair* lambda_param_with_default_var; while ( - (lambda_param_with_default_var = lambda_param_with_default_rule(p)) + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { res = lambda_param_with_default_var; @@ -14427,7 +14427,7 @@ _loop1_81_rule(Parser *p) { // lambda_param_no_default arg_ty lambda_param_no_default_var; while ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { res = lambda_param_no_default_var; @@ -14480,7 +14480,7 @@ _loop1_82_rule(Parser *p) { // lambda_param_no_default arg_ty lambda_param_no_default_var; while ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { res = lambda_param_no_default_var; @@ -14533,7 +14533,7 @@ _loop0_83_rule(Parser *p) { // lambda_param_no_default arg_ty lambda_param_no_default_var; while ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { res = lambda_param_no_default_var; @@ -14582,7 +14582,7 @@ _loop1_84_rule(Parser *p) { // lambda_param_with_default NameDefaultPair* lambda_param_with_default_var; while ( - (lambda_param_with_default_var = lambda_param_with_default_rule(p)) + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { res = lambda_param_with_default_var; @@ -14635,7 +14635,7 @@ _loop0_85_rule(Parser *p) { // lambda_param_no_default arg_ty lambda_param_no_default_var; while ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { res = lambda_param_no_default_var; @@ -14684,7 +14684,7 @@ _loop1_86_rule(Parser 
*p) { // lambda_param_with_default NameDefaultPair* lambda_param_with_default_var; while ( - (lambda_param_with_default_var = lambda_param_with_default_rule(p)) + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { res = lambda_param_with_default_var; @@ -14737,7 +14737,7 @@ _loop0_87_rule(Parser *p) { // lambda_param_maybe_default NameDefaultPair* lambda_param_maybe_default_var; while ( - (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) + (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default ) { res = lambda_param_maybe_default_var; @@ -14786,7 +14786,7 @@ _loop1_88_rule(Parser *p) { // lambda_param_maybe_default NameDefaultPair* lambda_param_maybe_default_var; while ( - (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) + (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default ) { res = lambda_param_maybe_default_var; @@ -14839,7 +14839,7 @@ _loop1_89_rule(Parser *p) { // ('or' conjunction) void *_tmp_140_var; while ( - (_tmp_140_var = _tmp_140_rule(p)) + (_tmp_140_var = _tmp_140_rule(p)) // 'or' conjunction ) { res = _tmp_140_var; @@ -14892,7 +14892,7 @@ _loop1_90_rule(Parser *p) { // ('and' inversion) void *_tmp_141_var; while ( - (_tmp_141_var = _tmp_141_rule(p)) + (_tmp_141_var = _tmp_141_rule(p)) // 'and' inversion ) { res = _tmp_141_var; @@ -14945,7 +14945,7 @@ _loop1_91_rule(Parser *p) { // compare_op_bitwise_or_pair CmpopExprPair* compare_op_bitwise_or_pair_var; while ( - (compare_op_bitwise_or_pair_var = compare_op_bitwise_or_pair_rule(p)) + (compare_op_bitwise_or_pair_var = compare_op_bitwise_or_pair_rule(p)) // compare_op_bitwise_or_pair ) { res = compare_op_bitwise_or_pair_var; @@ -14990,7 +14990,7 @@ _tmp_92_rule(Parser *p) { // '!=' Token * tok; if ( - (tok = _PyPegen_expect_token(p, 28)) + (tok = _PyPegen_expect_token(p, 28)) // token='!=' ) { res = 
_PyPegen_check_barry_as_flufl ( p ) ? NULL : tok; @@ -15028,9 +15028,9 @@ _loop0_94_rule(Parser *p) expr_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = slice_rule(p)) + (elem = slice_rule(p)) // slice ) { res = elem; @@ -15077,9 +15077,9 @@ _gather_93_rule(Parser *p) expr_ty elem; asdl_seq * seq; if ( - (elem = slice_rule(p)) + (elem = slice_rule(p)) // slice && - (seq = _loop0_94_rule(p)) + (seq = _loop0_94_rule(p)) // _loop0_94 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -15105,9 +15105,9 @@ _tmp_95_rule(Parser *p) void *d; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (d = expression_rule(p), 1) + (d = expression_rule(p), 1) // expression? ) { res = d; @@ -15136,7 +15136,7 @@ _tmp_96_rule(Parser *p) { // tuple expr_ty tuple_var; if ( - (tuple_var = tuple_rule(p)) + (tuple_var = tuple_rule(p)) // tuple ) { res = tuple_var; @@ -15147,7 +15147,7 @@ _tmp_96_rule(Parser *p) { // group expr_ty group_var; if ( - (group_var = group_rule(p)) + (group_var = group_rule(p)) // group ) { res = group_var; @@ -15158,7 +15158,7 @@ _tmp_96_rule(Parser *p) { // genexp expr_ty genexp_var; if ( - (genexp_var = genexp_rule(p)) + (genexp_var = genexp_rule(p)) // genexp ) { res = genexp_var; @@ -15183,7 +15183,7 @@ _tmp_97_rule(Parser *p) { // list expr_ty list_var; if ( - (list_var = list_rule(p)) + (list_var = list_rule(p)) // list ) { res = list_var; @@ -15194,7 +15194,7 @@ _tmp_97_rule(Parser *p) { // listcomp expr_ty listcomp_var; if ( - (listcomp_var = listcomp_rule(p)) + (listcomp_var = listcomp_rule(p)) // listcomp ) { res = listcomp_var; @@ -15219,7 +15219,7 @@ _tmp_98_rule(Parser *p) { // dict expr_ty dict_var; if ( - (dict_var = dict_rule(p)) + (dict_var = dict_rule(p)) // dict ) { res = dict_var; @@ -15230,7 +15230,7 @@ _tmp_98_rule(Parser *p) { // set expr_ty set_var; if ( - (set_var 
= set_rule(p)) + (set_var = set_rule(p)) // set ) { res = set_var; @@ -15241,7 +15241,7 @@ _tmp_98_rule(Parser *p) { // dictcomp expr_ty dictcomp_var; if ( - (dictcomp_var = dictcomp_rule(p)) + (dictcomp_var = dictcomp_rule(p)) // dictcomp ) { res = dictcomp_var; @@ -15252,7 +15252,7 @@ _tmp_98_rule(Parser *p) { // setcomp expr_ty setcomp_var; if ( - (setcomp_var = setcomp_rule(p)) + (setcomp_var = setcomp_rule(p)) // setcomp ) { res = setcomp_var; @@ -15285,7 +15285,7 @@ _loop1_99_rule(Parser *p) { // STRING expr_ty string_var; while ( - (string_var = _PyPegen_string_token(p)) + (string_var = _PyPegen_string_token(p)) // STRING ) { res = string_var; @@ -15332,11 +15332,11 @@ _tmp_100_rule(Parser *p) expr_ty y; void *z; if ( - (y = star_named_expression_rule(p)) + (y = star_named_expression_rule(p)) // star_named_expression && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (z = star_named_expressions_rule(p), 1) + (z = star_named_expressions_rule(p), 1) // star_named_expressions? 
) { res = _PyPegen_seq_insert_in_front ( p , y , z ); @@ -15365,7 +15365,7 @@ _tmp_101_rule(Parser *p) { // yield_expr expr_ty yield_expr_var; if ( - (yield_expr_var = yield_expr_rule(p)) + (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { res = yield_expr_var; @@ -15376,7 +15376,7 @@ _tmp_101_rule(Parser *p) { // named_expression expr_ty named_expression_var; if ( - (named_expression_var = named_expression_rule(p)) + (named_expression_var = named_expression_rule(p)) // named_expression ) { res = named_expression_var; @@ -15410,9 +15410,9 @@ _loop0_103_rule(Parser *p) KeyValuePair* elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = kvpair_rule(p)) + (elem = kvpair_rule(p)) // kvpair ) { res = elem; @@ -15459,9 +15459,9 @@ _gather_102_rule(Parser *p) KeyValuePair* elem; asdl_seq * seq; if ( - (elem = kvpair_rule(p)) + (elem = kvpair_rule(p)) // kvpair && - (seq = _loop0_103_rule(p)) + (seq = _loop0_103_rule(p)) // _loop0_103 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -15494,7 +15494,7 @@ _loop1_104_rule(Parser *p) { // for_if_clause comprehension_ty for_if_clause_var; while ( - (for_if_clause_var = for_if_clause_rule(p)) + (for_if_clause_var = for_if_clause_rule(p)) // for_if_clause ) { res = for_if_clause_var; @@ -15547,7 +15547,7 @@ _loop0_105_rule(Parser *p) { // ('if' disjunction) void *_tmp_142_var; while ( - (_tmp_142_var = _tmp_142_rule(p)) + (_tmp_142_var = _tmp_142_rule(p)) // 'if' disjunction ) { res = _tmp_142_var; @@ -15596,7 +15596,7 @@ _loop0_106_rule(Parser *p) { // ('if' disjunction) void *_tmp_143_var; while ( - (_tmp_143_var = _tmp_143_rule(p)) + (_tmp_143_var = _tmp_143_rule(p)) // 'if' disjunction ) { res = _tmp_143_var; @@ -15638,9 +15638,9 @@ _tmp_107_rule(Parser *p) expr_ty c; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (c = args_rule(p)) + (c = 
args_rule(p)) // args ) { res = c; @@ -15670,9 +15670,9 @@ _tmp_108_rule(Parser *p) expr_ty c; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (c = args_rule(p)) + (c = args_rule(p)) // args ) { res = c; @@ -15710,9 +15710,9 @@ _loop0_110_rule(Parser *p) KeywordOrStarred* elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = kwarg_or_starred_rule(p)) + (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred ) { res = elem; @@ -15759,9 +15759,9 @@ _gather_109_rule(Parser *p) KeywordOrStarred* elem; asdl_seq * seq; if ( - (elem = kwarg_or_starred_rule(p)) + (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred && - (seq = _loop0_110_rule(p)) + (seq = _loop0_110_rule(p)) // _loop0_110 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -15795,9 +15795,9 @@ _loop0_112_rule(Parser *p) KeywordOrStarred* elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = kwarg_or_double_starred_rule(p)) + (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred ) { res = elem; @@ -15844,9 +15844,9 @@ _gather_111_rule(Parser *p) KeywordOrStarred* elem; asdl_seq * seq; if ( - (elem = kwarg_or_double_starred_rule(p)) + (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred && - (seq = _loop0_112_rule(p)) + (seq = _loop0_112_rule(p)) // _loop0_112 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -15880,9 +15880,9 @@ _loop0_114_rule(Parser *p) KeywordOrStarred* elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = kwarg_or_starred_rule(p)) + (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred ) { res = elem; @@ -15929,9 +15929,9 @@ _gather_113_rule(Parser *p) KeywordOrStarred* elem; asdl_seq * seq; if ( - (elem = 
kwarg_or_starred_rule(p)) + (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred && - (seq = _loop0_114_rule(p)) + (seq = _loop0_114_rule(p)) // _loop0_114 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -15965,9 +15965,9 @@ _loop0_116_rule(Parser *p) KeywordOrStarred* elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = kwarg_or_double_starred_rule(p)) + (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred ) { res = elem; @@ -16014,9 +16014,9 @@ _gather_115_rule(Parser *p) KeywordOrStarred* elem; asdl_seq * seq; if ( - (elem = kwarg_or_double_starred_rule(p)) + (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred && - (seq = _loop0_116_rule(p)) + (seq = _loop0_116_rule(p)) // _loop0_116 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -16049,7 +16049,7 @@ _loop0_117_rule(Parser *p) { // (',' star_target) void *_tmp_144_var; while ( - (_tmp_144_var = _tmp_144_rule(p)) + (_tmp_144_var = _tmp_144_rule(p)) // ',' star_target ) { res = _tmp_144_var; @@ -16099,9 +16099,9 @@ _loop0_119_rule(Parser *p) expr_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = star_target_rule(p)) + (elem = star_target_rule(p)) // star_target ) { res = elem; @@ -16148,9 +16148,9 @@ _gather_118_rule(Parser *p) expr_ty elem; asdl_seq * seq; if ( - (elem = star_target_rule(p)) + (elem = star_target_rule(p)) // star_target && - (seq = _loop0_119_rule(p)) + (seq = _loop0_119_rule(p)) // _loop0_119 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -16175,9 +16175,9 @@ _tmp_120_rule(Parser *p) { // !'*' star_target expr_ty star_target_var; if ( - _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 16) + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 16) // token='*' && - (star_target_var = star_target_rule(p)) + (star_target_var = 
star_target_rule(p)) // star_target ) { res = star_target_var; @@ -16211,9 +16211,9 @@ _loop0_122_rule(Parser *p) expr_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = del_target_rule(p)) + (elem = del_target_rule(p)) // del_target ) { res = elem; @@ -16260,9 +16260,9 @@ _gather_121_rule(Parser *p) expr_ty elem; asdl_seq * seq; if ( - (elem = del_target_rule(p)) + (elem = del_target_rule(p)) // del_target && - (seq = _loop0_122_rule(p)) + (seq = _loop0_122_rule(p)) // _loop0_122 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -16296,9 +16296,9 @@ _loop0_124_rule(Parser *p) expr_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = target_rule(p)) + (elem = target_rule(p)) // target ) { res = elem; @@ -16345,9 +16345,9 @@ _gather_123_rule(Parser *p) expr_ty elem; asdl_seq * seq; if ( - (elem = target_rule(p)) + (elem = target_rule(p)) // target && - (seq = _loop0_124_rule(p)) + (seq = _loop0_124_rule(p)) // _loop0_124 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -16372,7 +16372,7 @@ _tmp_125_rule(Parser *p) { // args expr_ty args_var; if ( - (args_var = args_rule(p)) + (args_var = args_rule(p)) // args ) { res = args_var; @@ -16384,9 +16384,9 @@ _tmp_125_rule(Parser *p) expr_ty expression_var; asdl_seq* for_if_clauses_var; if ( - (expression_var = expression_rule(p)) + (expression_var = expression_rule(p)) // expression && - (for_if_clauses_var = for_if_clauses_rule(p)) + (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses ) { res = _PyPegen_dummy_name(p, expression_var, for_if_clauses_var); @@ -16412,9 +16412,9 @@ _tmp_126_rule(Parser *p) expr_ty annotated_rhs_var; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 22)) + (literal = _PyPegen_expect_token(p, 22)) // token='=' && - (annotated_rhs_var = annotated_rhs_rule(p)) + 
(annotated_rhs_var = annotated_rhs_rule(p)) // annotated_rhs ) { res = _PyPegen_dummy_name(p, literal, annotated_rhs_var); @@ -16439,7 +16439,7 @@ _tmp_127_rule(Parser *p) { // '=' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 22)) + (literal = _PyPegen_expect_token(p, 22)) // token='=' ) { res = literal; @@ -16450,7 +16450,7 @@ _tmp_127_rule(Parser *p) { // augassign AugOperator* augassign_var; if ( - (augassign_var = augassign_rule(p)) + (augassign_var = augassign_rule(p)) // augassign ) { res = augassign_var; @@ -16475,7 +16475,7 @@ _tmp_128_rule(Parser *p) { // yield_expr expr_ty yield_expr_var; if ( - (yield_expr_var = yield_expr_rule(p)) + (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { res = yield_expr_var; @@ -16486,7 +16486,7 @@ _tmp_128_rule(Parser *p) { // star_expressions expr_ty star_expressions_var; if ( - (star_expressions_var = star_expressions_rule(p)) + (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { res = star_expressions_var; @@ -16511,7 +16511,7 @@ _tmp_129_rule(Parser *p) { // '[' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 9)) + (literal = _PyPegen_expect_token(p, 9)) // token='[' ) { res = literal; @@ -16522,7 +16522,7 @@ _tmp_129_rule(Parser *p) { // '(' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' ) { res = literal; @@ -16533,7 +16533,7 @@ _tmp_129_rule(Parser *p) { // '{' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 25)) + (literal = _PyPegen_expect_token(p, 25)) // token='{' ) { res = literal; @@ -16566,7 +16566,7 @@ _loop0_130_rule(Parser *p) { // param_no_default arg_ty param_no_default_var; while ( - (param_no_default_var = param_no_default_rule(p)) + (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { res = param_no_default_var; @@ -16607,7 +16607,7 @@ _tmp_131_rule(Parser *p) { // slash_with_default SlashWithDefault* slash_with_default_var; if ( - 
(slash_with_default_var = slash_with_default_rule(p)) + (slash_with_default_var = slash_with_default_rule(p)) // slash_with_default ) { res = slash_with_default_var; @@ -16618,7 +16618,7 @@ _tmp_131_rule(Parser *p) { // param_with_default+ asdl_seq * _loop1_145_var; if ( - (_loop1_145_var = _loop1_145_rule(p)) + (_loop1_145_var = _loop1_145_rule(p)) // param_with_default+ ) { res = _loop1_145_var; @@ -16643,7 +16643,7 @@ _tmp_132_rule(Parser *p) { // ')' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 8)) + (literal = _PyPegen_expect_token(p, 8)) // token=')' ) { res = literal; @@ -16655,9 +16655,9 @@ _tmp_132_rule(Parser *p) void *_tmp_146_var; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_tmp_146_var = _tmp_146_rule(p)) + (_tmp_146_var = _tmp_146_rule(p)) // ')' | '**' ) { res = _PyPegen_dummy_name(p, literal, _tmp_146_var); @@ -16682,7 +16682,7 @@ _tmp_133_rule(Parser *p) { // ':' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' ) { res = literal; @@ -16694,9 +16694,9 @@ _tmp_133_rule(Parser *p) void *_tmp_147_var; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_tmp_147_var = _tmp_147_rule(p)) + (_tmp_147_var = _tmp_147_rule(p)) // ':' | '**' ) { res = _PyPegen_dummy_name(p, literal, _tmp_147_var); @@ -16722,9 +16722,9 @@ _tmp_134_rule(Parser *p) Token * literal; expr_ty z; if ( - (z = star_targets_rule(p)) + (z = star_targets_rule(p)) // star_targets && - (literal = _PyPegen_expect_token(p, 22)) + (literal = _PyPegen_expect_token(p, 22)) // token='=' ) { res = z; @@ -16753,7 +16753,7 @@ _tmp_135_rule(Parser *p) { // '.' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 23)) + (literal = _PyPegen_expect_token(p, 23)) // token='.' ) { res = literal; @@ -16764,7 +16764,7 @@ _tmp_135_rule(Parser *p) { // '...' 
Token * literal; if ( - (literal = _PyPegen_expect_token(p, 52)) + (literal = _PyPegen_expect_token(p, 52)) // token='...' ) { res = literal; @@ -16789,7 +16789,7 @@ _tmp_136_rule(Parser *p) { // '.' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 23)) + (literal = _PyPegen_expect_token(p, 23)) // token='.' ) { res = literal; @@ -16800,7 +16800,7 @@ _tmp_136_rule(Parser *p) { // '...' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 52)) + (literal = _PyPegen_expect_token(p, 52)) // token='...' ) { res = literal; @@ -16827,11 +16827,11 @@ _tmp_137_rule(Parser *p) Token * literal; Token * newline_var; if ( - (literal = _PyPegen_expect_token(p, 49)) + (literal = _PyPegen_expect_token(p, 49)) // token='@' && - (f = named_expression_rule(p)) + (f = named_expression_rule(p)) // named_expression && - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { res = f; @@ -16861,9 +16861,9 @@ _tmp_138_rule(Parser *p) expr_ty c; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (c = star_expression_rule(p)) + (c = star_expression_rule(p)) // star_expression ) { res = c; @@ -16893,9 +16893,9 @@ _tmp_139_rule(Parser *p) expr_ty c; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (c = expression_rule(p)) + (c = expression_rule(p)) // expression ) { res = c; @@ -16925,9 +16925,9 @@ _tmp_140_rule(Parser *p) expr_ty c; Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 532)) + (keyword = _PyPegen_expect_token(p, 532)) // token='or' && - (c = conjunction_rule(p)) + (c = conjunction_rule(p)) // conjunction ) { res = c; @@ -16957,9 +16957,9 @@ _tmp_141_rule(Parser *p) expr_ty c; Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 533)) + (keyword = _PyPegen_expect_token(p, 533)) // token='and' && - (c = inversion_rule(p)) + (c = 
inversion_rule(p)) // inversion ) { res = c; @@ -16989,9 +16989,9 @@ _tmp_142_rule(Parser *p) Token * keyword; expr_ty z; if ( - (keyword = _PyPegen_expect_token(p, 510)) + (keyword = _PyPegen_expect_token(p, 510)) // token='if' && - (z = disjunction_rule(p)) + (z = disjunction_rule(p)) // disjunction ) { res = z; @@ -17021,9 +17021,9 @@ _tmp_143_rule(Parser *p) Token * keyword; expr_ty z; if ( - (keyword = _PyPegen_expect_token(p, 510)) + (keyword = _PyPegen_expect_token(p, 510)) // token='if' && - (z = disjunction_rule(p)) + (z = disjunction_rule(p)) // disjunction ) { res = z; @@ -17053,9 +17053,9 @@ _tmp_144_rule(Parser *p) expr_ty c; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (c = star_target_rule(p)) + (c = star_target_rule(p)) // star_target ) { res = c; @@ -17092,7 +17092,7 @@ _loop1_145_rule(Parser *p) { // param_with_default NameDefaultPair* param_with_default_var; while ( - (param_with_default_var = param_with_default_rule(p)) + (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { res = param_with_default_var; @@ -17137,7 +17137,7 @@ _tmp_146_rule(Parser *p) { // ')' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 8)) + (literal = _PyPegen_expect_token(p, 8)) // token=')' ) { res = literal; @@ -17148,7 +17148,7 @@ _tmp_146_rule(Parser *p) { // '**' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 35)) + (literal = _PyPegen_expect_token(p, 35)) // token='**' ) { res = literal; @@ -17173,7 +17173,7 @@ _tmp_147_rule(Parser *p) { // ':' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' ) { res = literal; @@ -17184,7 +17184,7 @@ _tmp_147_rule(Parser *p) { // '**' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 35)) + (literal = _PyPegen_expect_token(p, 35)) // token='**' ) { res = literal; diff --git a/Tools/peg_generator/pegen/c_generator.py 
b/Tools/peg_generator/pegen/c_generator.py index 40004e7875278..3bf6d9ed6a3ab 100644 --- a/Tools/peg_generator/pegen/c_generator.py +++ b/Tools/peg_generator/pegen/c_generator.py @@ -68,6 +68,7 @@ class FunctionCall: return_type: Optional[str] = None nodetype: Optional[NodeTypes] = None force_true: bool = False + comment: Optional[str] = None def __str__(self) -> str: parts = [] @@ -78,6 +79,8 @@ def __str__(self) -> str: parts.append(", 1") if self.assigned_variable: parts = ["(", self.assigned_variable, " = ", *parts, ")"] + if self.comment: + parts.append(f" // {self.comment}") return "".join(parts) @@ -103,6 +106,7 @@ def keyword_helper(self, keyword: str) -> FunctionCall: arguments=["p", self.keyword_cache[keyword]], return_type="Token *", nodetype=NodeTypes.KEYWORD, + comment=f"token='{keyword}'", ) def visit_NameLeaf(self, node: NameLeaf) -> FunctionCall: @@ -115,6 +119,7 @@ def visit_NameLeaf(self, node: NameLeaf) -> FunctionCall: arguments=["p"], nodetype=BASE_NODETYPES[name], return_type="expr_ty", + comment=name, ) return FunctionCall( assigned_variable=f"{name.lower()}_var", @@ -122,6 +127,7 @@ def visit_NameLeaf(self, node: NameLeaf) -> FunctionCall: arguments=["p", name], nodetype=NodeTypes.GENERIC_TOKEN, return_type="Token *", + comment=f"token='{name}'", ) type = None @@ -134,6 +140,7 @@ def visit_NameLeaf(self, node: NameLeaf) -> FunctionCall: function=f"{name}_rule", arguments=["p"], return_type=type, + comment=f"{node}" ) def visit_StringLeaf(self, node: StringLeaf) -> FunctionCall: @@ -149,6 +156,7 @@ def visit_StringLeaf(self, node: StringLeaf) -> FunctionCall: arguments=["p", type], nodetype=NodeTypes.GENERIC_TOKEN, return_type="Token *", + comment=f"token='{val}'", ) def visit_Rhs(self, node: Rhs) -> FunctionCall: @@ -168,6 +176,7 @@ def can_we_inline(node: Rhs) -> int: name = self.gen.name_node(node) self.cache[node] = FunctionCall( assigned_variable=f"{name}_var", function=f"{name}_rule", arguments=["p"], + comment=f"{node}" ) return 
self.cache[node] @@ -190,6 +199,7 @@ def lookahead_call_helper(self, node: Lookahead, positive: int) -> FunctionCall: function=f"_PyPegen_lookahead_with_int", arguments=[positive, call.function, *call.arguments], return_type="int", + comment=f"token={node.node}", ) else: return FunctionCall( @@ -211,6 +221,7 @@ def visit_Opt(self, node: Opt) -> FunctionCall: function=call.function, arguments=call.arguments, force_true=True, + comment=f"{node}" ) def visit_Repeat0(self, node: Repeat0) -> FunctionCall: @@ -222,6 +233,7 @@ def visit_Repeat0(self, node: Repeat0) -> FunctionCall: function=f"{name}_rule", arguments=["p"], return_type="asdl_seq *", + comment=f"{node}", ) return self.cache[node] @@ -234,6 +246,7 @@ def visit_Repeat1(self, node: Repeat1) -> FunctionCall: function=f"{name}_rule", arguments=["p"], return_type="asdl_seq *", + comment=f"{node}", ) return self.cache[node] @@ -246,6 +259,7 @@ def visit_Gather(self, node: Gather) -> FunctionCall: function=f"{name}_rule", arguments=["p"], return_type="asdl_seq *", + comment=f"{node}", ) return self.cache[node] From webhook-mailer at python.org Wed May 6 21:09:40 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Thu, 07 May 2020 01:09:40 -0000 Subject: [Python-checkins] bpo-40397: Refactor typing._GenericAlias (GH-19719) Message-ID: https://github.com/python/cpython/commit/c1c7d8ead9eb214a6149a43e31a3213c52448877 commit: c1c7d8ead9eb214a6149a43e31a3213c52448877 branch: master author: Serhiy Storchaka committer: GitHub date: 2020-05-07T04:09:33+03:00 summary: bpo-40397: Refactor typing._GenericAlias (GH-19719) Make the design more object-oriented. Split _GenericAlias on two almost independent classes: for special generic aliases like List and for parametrized generic aliases like List[int]. Add specialized subclasses for Callable, Callable[...], Tuple and Union[...]. 
files: M Lib/typing.py diff --git a/Lib/typing.py b/Lib/typing.py index f3cd280a09e27..681ab6d21e0a3 100644 --- a/Lib/typing.py +++ b/Lib/typing.py @@ -181,34 +181,11 @@ def _collect_type_vars(types): for t in types: if isinstance(t, TypeVar) and t not in tvars: tvars.append(t) - if ((isinstance(t, _GenericAlias) and not t._special) - or isinstance(t, GenericAlias)): + if isinstance(t, (_GenericAlias, GenericAlias)): tvars.extend([t for t in t.__parameters__ if t not in tvars]) return tuple(tvars) -def _subs_tvars(tp, tvars, subs): - """Substitute type variables 'tvars' with substitutions 'subs'. - These two must have the same length. - """ - if not isinstance(tp, (_GenericAlias, GenericAlias)): - return tp - new_args = list(tp.__args__) - for a, arg in enumerate(tp.__args__): - if isinstance(arg, TypeVar): - for i, tvar in enumerate(tvars): - if arg == tvar: - new_args[a] = subs[i] - else: - new_args[a] = _subs_tvars(arg, tvars, subs) - if tp.__origin__ is Union: - return Union[tuple(new_args)] - if isinstance(tp, GenericAlias): - return GenericAlias(tp.__origin__, tuple(new_args)) - else: - return tp.copy_with(tuple(new_args)) - - def _check_generic(cls, parameters): """Check correct count for parameters of a generic cls (internal helper). This gives a nice error message in case of count mismatch. @@ -229,7 +206,7 @@ def _remove_dups_flatten(parameters): # Flatten out Union[Union[...], ...]. 
params = [] for p in parameters: - if isinstance(p, _GenericAlias) and p.__origin__ is Union: + if isinstance(p, _UnionGenericAlias): params.extend(p.__args__) elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union: params.extend(p[1:]) @@ -274,18 +251,14 @@ def _eval_type(t, globalns, localns): """ if isinstance(t, ForwardRef): return t._evaluate(globalns, localns) - if isinstance(t, _GenericAlias): + if isinstance(t, (_GenericAlias, GenericAlias)): ev_args = tuple(_eval_type(a, globalns, localns) for a in t.__args__) if ev_args == t.__args__: return t - res = t.copy_with(ev_args) - res._special = t._special - return res - if isinstance(t, GenericAlias): - ev_args = tuple(_eval_type(a, globalns, localns) for a in t.__args__) - if ev_args == t.__args__: - return t - return GenericAlias(t.__origin__, ev_args) + if isinstance(t, GenericAlias): + return GenericAlias(t.__origin__, ev_args) + else: + return t.copy_with(ev_args) return t @@ -300,6 +273,7 @@ def __init_subclass__(self, /, *args, **kwds): class _Immutable: """Mixin to indicate that object should not be copied.""" + __slots__ = () def __copy__(self): return self @@ -446,7 +420,7 @@ def Union(self, parameters): parameters = _remove_dups_flatten(parameters) if len(parameters) == 1: return parameters[0] - return _GenericAlias(self, parameters) + return _UnionGenericAlias(self, parameters) @_SpecialForm def Optional(self, parameters): @@ -579,7 +553,7 @@ def longest(x: A, y: A) -> A: """ __slots__ = ('__name__', '__bound__', '__constraints__', - '__covariant__', '__contravariant__') + '__covariant__', '__contravariant__', '__dict__') def __init__(self, name, *constraints, bound=None, covariant=False, contravariant=False): @@ -629,23 +603,10 @@ def __reduce__(self): # e.g., Dict[T, int].__args__ == (T, int). -# Mapping from non-generic type names that have a generic alias in typing -# but with a different name. 
-_normalize_alias = {'list': 'List', - 'tuple': 'Tuple', - 'dict': 'Dict', - 'set': 'Set', - 'frozenset': 'FrozenSet', - 'deque': 'Deque', - 'defaultdict': 'DefaultDict', - 'type': 'Type', - 'Set': 'AbstractSet'} - def _is_dunder(attr): return attr.startswith('__') and attr.endswith('__') - -class _GenericAlias(_Final, _root=True): +class _BaseGenericAlias(_Final, _root=True): """The central part of internal API. This represents a generic version of type 'origin' with type arguments 'params'. @@ -654,12 +615,8 @@ class _GenericAlias(_Final, _root=True): have 'name' always set. If 'inst' is False, then the alias can't be instantiated, this is used by e.g. typing.List and typing.Dict. """ - def __init__(self, origin, params, *, inst=True, special=False, name=None): + def __init__(self, origin, params, *, inst=True, name=None): self._inst = inst - self._special = special - if special and name is None: - orig_name = origin.__name__ - name = _normalize_alias.get(orig_name, orig_name) self._name = name if not isinstance(params, tuple): params = (params,) @@ -671,68 +628,20 @@ def __init__(self, origin, params, *, inst=True, special=False, name=None): self.__slots__ = None # This is not documented. if not name: self.__module__ = origin.__module__ - if special: - self.__doc__ = f'A generic version of {origin.__module__}.{origin.__qualname__}' - - @_tp_cache - def __getitem__(self, params): - if self.__origin__ in (Generic, Protocol): - # Can't subscript Generic[...] or Protocol[...]. - raise TypeError(f"Cannot subscript already-subscripted {self}") - if not isinstance(params, tuple): - params = (params,) - msg = "Parameters to generic types must be types." - params = tuple(_type_check(p, msg) for p in params) - _check_generic(self, params) - return _subs_tvars(self, self.__parameters__, params) - - def copy_with(self, params): - # We don't copy self._special. 
- return _GenericAlias(self.__origin__, params, name=self._name, inst=self._inst) - - def __repr__(self): - if (self.__origin__ == Union and len(self.__args__) == 2 - and type(None) in self.__args__): - if self.__args__[0] is not type(None): - arg = self.__args__[0] - else: - arg = self.__args__[1] - return (f'typing.Optional[{_type_repr(arg)}]') - if (self._name != 'Callable' or - len(self.__args__) == 2 and self.__args__[0] is Ellipsis): - if self._name: - name = 'typing.' + self._name - else: - name = _type_repr(self.__origin__) - if not self._special: - args = f'[{", ".join([_type_repr(a) for a in self.__args__])}]' - else: - args = '' - return (f'{name}{args}') - if self._special: - return 'typing.Callable' - return (f'typing.Callable' - f'[[{", ".join([_type_repr(a) for a in self.__args__[:-1]])}], ' - f'{_type_repr(self.__args__[-1])}]') def __eq__(self, other): - if not isinstance(other, _GenericAlias): + if not isinstance(other, _BaseGenericAlias): return NotImplemented - if self.__origin__ != other.__origin__: - return False - if self.__origin__ is Union and other.__origin__ is Union: - return frozenset(self.__args__) == frozenset(other.__args__) - return self.__args__ == other.__args__ + return (self.__origin__ == other.__origin__ + and self.__args__ == other.__args__) def __hash__(self): - if self.__origin__ is Union: - return hash((Union, frozenset(self.__args__))) return hash((self.__origin__, self.__args__)) def __call__(self, *args, **kwargs): if not self._inst: raise TypeError(f"Type {self._name} cannot be instantiated; " - f"use {self._name.lower()}() instead") + f"use {self.__origin__.__name__}() instead") result = self.__origin__(*args, **kwargs) try: result.__orig_class__ = self @@ -741,23 +650,16 @@ def __call__(self, *args, **kwargs): return result def __mro_entries__(self, bases): - if self._name: # generic version of an ABC or built-in class - res = [] - if self.__origin__ not in bases: - res.append(self.__origin__) - i = bases.index(self) 
- if not any(isinstance(b, _GenericAlias) or issubclass(b, Generic) - for b in bases[i+1:]): - res.append(Generic) - return tuple(res) - if self.__origin__ is Generic: - if Protocol in bases: - return () - i = bases.index(self) - for b in bases[i+1:]: - if isinstance(b, _GenericAlias) and b is not self: - return () - return (self.__origin__,) + res = [] + if self.__origin__ not in bases: + res.append(self.__origin__) + i = bases.index(self) + for b in bases[i+1:]: + if isinstance(b, _BaseGenericAlias) or issubclass(b, Generic): + break + else: + res.append(Generic) + return tuple(res) def __getattr__(self, attr): # We are careful for copy and pickle. @@ -767,7 +669,7 @@ def __getattr__(self, attr): raise AttributeError(attr) def __setattr__(self, attr, val): - if _is_dunder(attr) or attr in ('_name', '_inst', '_special'): + if _is_dunder(attr) or attr in ('_name', '_inst'): super().__setattr__(attr, val) else: setattr(self.__origin__, attr, val) @@ -776,39 +678,124 @@ def __instancecheck__(self, obj): return self.__subclasscheck__(type(obj)) def __subclasscheck__(self, cls): - if self._special: - if not isinstance(cls, _GenericAlias): - return issubclass(cls, self.__origin__) - if cls._special: - return issubclass(cls.__origin__, self.__origin__) raise TypeError("Subscripted generics cannot be used with" " class and instance checks") - def __reduce__(self): - if self._special: - return self._name +class _GenericAlias(_BaseGenericAlias, _root=True): + @_tp_cache + def __getitem__(self, params): + if self.__origin__ in (Generic, Protocol): + # Can't subscript Generic[...] or Protocol[...]. + raise TypeError(f"Cannot subscript already-subscripted {self}") + if not isinstance(params, tuple): + params = (params,) + msg = "Parameters to generic types must be types." 
+ params = tuple(_type_check(p, msg) for p in params) + _check_generic(self, params) + + subst = dict(zip(self.__parameters__, params)) + new_args = [] + for arg in self.__args__: + if isinstance(arg, TypeVar): + arg = subst[arg] + elif isinstance(arg, (_BaseGenericAlias, GenericAlias)): + subargs = tuple(subst[x] for x in arg.__parameters__) + arg = arg[subargs] + new_args.append(arg) + return self.copy_with(tuple(new_args)) + + def copy_with(self, params): + return self.__class__(self.__origin__, params, name=self._name, inst=self._inst) + + def __repr__(self): + if self._name: + name = 'typing.' + self._name + else: + name = _type_repr(self.__origin__) + args = ", ".join([_type_repr(a) for a in self.__args__]) + return f'{name}[{args}]' + + def __reduce__(self): if self._name: origin = globals()[self._name] else: origin = self.__origin__ - if (origin is Callable and - not (len(self.__args__) == 2 and self.__args__[0] is Ellipsis)): - args = list(self.__args__[:-1]), self.__args__[-1] - else: - args = tuple(self.__args__) - if len(args) == 1 and not isinstance(args[0], tuple): - args, = args + args = tuple(self.__args__) + if len(args) == 1 and not isinstance(args[0], tuple): + args, = args return operator.getitem, (origin, args) + def __mro_entries__(self, bases): + if self._name: # generic version of an ABC or built-in class + return super().__mro_entries__(bases) + if self.__origin__ is Generic: + if Protocol in bases: + return () + i = bases.index(self) + for b in bases[i+1:]: + if isinstance(b, _BaseGenericAlias) and b is not self: + return () + return (self.__origin__,) + + +class _SpecialGenericAlias(_BaseGenericAlias, _root=True): + def __init__(self, origin, params, *, inst=True, name=None): + if name is None: + name = origin.__name__ + super().__init__(origin, params, inst=inst, name=name) + self.__doc__ = f'A generic version of {origin.__module__}.{origin.__qualname__}' + + @_tp_cache + def __getitem__(self, params): + if not isinstance(params, tuple): 
+ params = (params,) + msg = "Parameters to generic types must be types." + params = tuple(_type_check(p, msg) for p in params) + _check_generic(self, params) + assert self.__args__ == self.__parameters__ + return self.copy_with(params) + + def copy_with(self, params): + return _GenericAlias(self.__origin__, params, + name=self._name, inst=self._inst) + + def __repr__(self): + return 'typing.' + self._name + + def __subclasscheck__(self, cls): + if isinstance(cls, _SpecialGenericAlias): + return issubclass(cls.__origin__, self.__origin__) + if not isinstance(cls, _GenericAlias): + return issubclass(cls, self.__origin__) + return super().__subclasscheck__(cls) + + def __reduce__(self): + return self._name + + +class _CallableGenericAlias(_GenericAlias, _root=True): + def __repr__(self): + assert self._name == 'Callable' + if len(self.__args__) == 2 and self.__args__[0] is Ellipsis: + return super().__repr__() + return (f'typing.Callable' + f'[[{", ".join([_type_repr(a) for a in self.__args__[:-1]])}], ' + f'{_type_repr(self.__args__[-1])}]') + + def __reduce__(self): + args = self.__args__ + if not (len(args) == 2 and args[0] is ...): + args = list(args[:-1]), args[-1] + return operator.getitem, (Callable, args) + + +class _CallableType(_SpecialGenericAlias, _root=True): + def copy_with(self, params): + return _CallableGenericAlias(self.__origin__, params, + name=self._name, inst=self._inst) -class _VariadicGenericAlias(_GenericAlias, _root=True): - """Same as _GenericAlias above but for variadic aliases. Currently, - this is used only by special internal aliases: Tuple and Callable. 
- """ def __getitem__(self, params): - if self._name != 'Callable' or not self._special: - return self.__getitem_inner__(params) if not isinstance(params, tuple) or len(params) != 2: raise TypeError("Callable must be used as " "Callable[[arg, ...], result].") @@ -824,29 +811,53 @@ def __getitem__(self, params): @_tp_cache def __getitem_inner__(self, params): - if self.__origin__ is tuple and self._special: - if params == (): - return self.copy_with((_TypingEmpty,)) - if not isinstance(params, tuple): - params = (params,) - if len(params) == 2 and params[1] is ...: - msg = "Tuple[t, ...]: t must be a type." - p = _type_check(params[0], msg) - return self.copy_with((p, _TypingEllipsis)) - msg = "Tuple[t0, t1, ...]: each t must be a type." - params = tuple(_type_check(p, msg) for p in params) - return self.copy_with(params) - if self.__origin__ is collections.abc.Callable and self._special: - args, result = params - msg = "Callable[args, result]: result must be a type." - result = _type_check(result, msg) - if args is Ellipsis: - return self.copy_with((_TypingEllipsis, result)) - msg = "Callable[[arg, ...], result]: each arg must be a type." - args = tuple(_type_check(arg, msg) for arg in args) - params = args + (result,) - return self.copy_with(params) - return super().__getitem__(params) + args, result = params + msg = "Callable[args, result]: result must be a type." + result = _type_check(result, msg) + if args is Ellipsis: + return self.copy_with((_TypingEllipsis, result)) + msg = "Callable[[arg, ...], result]: each arg must be a type." + args = tuple(_type_check(arg, msg) for arg in args) + params = args + (result,) + return self.copy_with(params) + + +class _TupleType(_SpecialGenericAlias, _root=True): + @_tp_cache + def __getitem__(self, params): + if params == (): + return self.copy_with((_TypingEmpty,)) + if not isinstance(params, tuple): + params = (params,) + if len(params) == 2 and params[1] is ...: + msg = "Tuple[t, ...]: t must be a type." 
+ p = _type_check(params[0], msg) + return self.copy_with((p, _TypingEllipsis)) + msg = "Tuple[t0, t1, ...]: each t must be a type." + params = tuple(_type_check(p, msg) for p in params) + return self.copy_with(params) + + +class _UnionGenericAlias(_GenericAlias, _root=True): + def copy_with(self, params): + return Union[params] + + def __eq__(self, other): + if not isinstance(other, _UnionGenericAlias): + return NotImplemented + return set(self.__args__) == set(other.__args__) + + def __hash__(self): + return hash(frozenset(self.__args__)) + + def __repr__(self): + args = self.__args__ + if len(args) == 2: + if args[0] is type(None): + return f'typing.Optional[{_type_repr(args[1])}]' + elif args[1] is type(None): + return f'typing.Optional[{_type_repr(args[0])}]' + return super().__repr__() class Generic: @@ -1162,9 +1173,8 @@ def __reduce__(self): def __eq__(self, other): if not isinstance(other, _AnnotatedAlias): return NotImplemented - if self.__origin__ != other.__origin__: - return False - return self.__metadata__ == other.__metadata__ + return (self.__origin__ == other.__origin__ + and self.__metadata__ == other.__metadata__) def __hash__(self): return hash((self.__origin__, self.__metadata__)) @@ -1380,9 +1390,7 @@ def _strip_annotations(t): stripped_args = tuple(_strip_annotations(a) for a in t.__args__) if stripped_args == t.__args__: return t - res = t.copy_with(stripped_args) - res._special = t._special - return res + return t.copy_with(stripped_args) if isinstance(t, GenericAlias): stripped_args = tuple(_strip_annotations(a) for a in t.__args__) if stripped_args == t.__args__: @@ -1407,7 +1415,7 @@ def get_origin(tp): """ if isinstance(tp, _AnnotatedAlias): return Annotated - if isinstance(tp, (_GenericAlias, GenericAlias)): + if isinstance(tp, (_BaseGenericAlias, GenericAlias)): return tp.__origin__ if tp is Generic: return Generic @@ -1427,7 +1435,7 @@ def get_args(tp): """ if isinstance(tp, _AnnotatedAlias): return (tp.__origin__,) + tp.__metadata__ 
- if isinstance(tp, _GenericAlias) and not tp._special: + if isinstance(tp, _GenericAlias): res = tp.__args__ if tp.__origin__ is collections.abc.Callable and res[0] is not Ellipsis: res = (list(res[:-1]), res[-1]) @@ -1561,8 +1569,7 @@ class Other(Leaf): # Error reported by type checker # Various ABCs mimicking those in collections.abc. -def _alias(origin, params, inst=True): - return _GenericAlias(origin, params, special=True, inst=inst) +_alias = _SpecialGenericAlias Hashable = _alias(collections.abc.Hashable, ()) # Not generic. Awaitable = _alias(collections.abc.Awaitable, T_co) @@ -1575,7 +1582,7 @@ def _alias(origin, params, inst=True): Sized = _alias(collections.abc.Sized, ()) # Not generic. Container = _alias(collections.abc.Container, T_co) Collection = _alias(collections.abc.Collection, T_co) -Callable = _VariadicGenericAlias(collections.abc.Callable, (), special=True) +Callable = _CallableType(collections.abc.Callable, ()) Callable.__doc__ = \ """Callable type; Callable[[int], str] is a function of (int) -> str. @@ -1586,7 +1593,7 @@ def _alias(origin, params, inst=True): There is no syntax to indicate optional or keyword arguments, such function types are rarely used as callback types. """ -AbstractSet = _alias(collections.abc.Set, T_co) +AbstractSet = _alias(collections.abc.Set, T_co, name='AbstractSet') MutableSet = _alias(collections.abc.MutableSet, T) # NOTE: Mapping is only covariant in the value type. Mapping = _alias(collections.abc.Mapping, (KT, VT_co)) @@ -1594,7 +1601,7 @@ def _alias(origin, params, inst=True): Sequence = _alias(collections.abc.Sequence, T_co) MutableSequence = _alias(collections.abc.MutableSequence, T) ByteString = _alias(collections.abc.ByteString, ()) # Not generic -Tuple = _VariadicGenericAlias(tuple, (), inst=False, special=True) +Tuple = _TupleType(tuple, (), inst=False, name='Tuple') Tuple.__doc__ = \ """Tuple type; Tuple[X, Y] is the cross-product type of X and Y. 
@@ -1604,24 +1611,24 @@ def _alias(origin, params, inst=True): To specify a variable-length tuple of homogeneous type, use Tuple[T, ...]. """ -List = _alias(list, T, inst=False) -Deque = _alias(collections.deque, T) -Set = _alias(set, T, inst=False) -FrozenSet = _alias(frozenset, T_co, inst=False) +List = _alias(list, T, inst=False, name='List') +Deque = _alias(collections.deque, T, name='Deque') +Set = _alias(set, T, inst=False, name='Set') +FrozenSet = _alias(frozenset, T_co, inst=False, name='FrozenSet') MappingView = _alias(collections.abc.MappingView, T_co) KeysView = _alias(collections.abc.KeysView, KT) ItemsView = _alias(collections.abc.ItemsView, (KT, VT_co)) ValuesView = _alias(collections.abc.ValuesView, VT_co) -ContextManager = _alias(contextlib.AbstractContextManager, T_co) -AsyncContextManager = _alias(contextlib.AbstractAsyncContextManager, T_co) -Dict = _alias(dict, (KT, VT), inst=False) -DefaultDict = _alias(collections.defaultdict, (KT, VT)) +ContextManager = _alias(contextlib.AbstractContextManager, T_co, name='ContextManager') +AsyncContextManager = _alias(contextlib.AbstractAsyncContextManager, T_co, name='AsyncContextManager') +Dict = _alias(dict, (KT, VT), inst=False, name='Dict') +DefaultDict = _alias(collections.defaultdict, (KT, VT), name='DefaultDict') OrderedDict = _alias(collections.OrderedDict, (KT, VT)) Counter = _alias(collections.Counter, T) ChainMap = _alias(collections.ChainMap, (KT, VT)) Generator = _alias(collections.abc.Generator, (T_co, T_contra, V_co)) AsyncGenerator = _alias(collections.abc.AsyncGenerator, (T_co, T_contra)) -Type = _alias(type, CT_co, inst=False) +Type = _alias(type, CT_co, inst=False, name='Type') Type.__doc__ = \ """A special construct usable to annotate class objects. 
From webhook-mailer at python.org Wed May 6 21:17:23 2020 From: webhook-mailer at python.org (Dong-hee Na) Date: Thu, 07 May 2020 01:17:23 -0000 Subject: [Python-checkins] bpo-1635741: Port errno module to multiphase initialization (GH-19923) Message-ID: https://github.com/python/cpython/commit/3466922320d54a922cfe6d6d44e89e1cea4023ef commit: 3466922320d54a922cfe6d6d44e89e1cea4023ef branch: master author: Dong-hee Na committer: GitHub date: 2020-05-07T10:17:16+09:00 summary: bpo-1635741: Port errno module to multiphase initialization (GH-19923) files: A Misc/NEWS.d/next/Core and Builtins/2020-05-05-21-11-35.bpo-1635741.ggwD3C.rst M Modules/errnomodule.c diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-05-21-11-35.bpo-1635741.ggwD3C.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-05-21-11-35.bpo-1635741.ggwD3C.rst new file mode 100644 index 0000000000000..197eae97c3d1a --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-05-21-11-35.bpo-1635741.ggwD3C.rst @@ -0,0 +1 @@ +Port :mod:`errno` to multiphase initialization (:pep:`489`). diff --git a/Modules/errnomodule.c b/Modules/errnomodule.c index 06ed53a64dbdc..d99bed45bd6a2 100644 --- a/Modules/errnomodule.c +++ b/Modules/errnomodule.c @@ -46,66 +46,57 @@ static PyMethodDef errno_methods[] = { /* Helper function doing the dictionary inserting */ -static void -_inscode(PyObject *d, PyObject *de, const char *name, int code) +static int +_add_errcode(PyObject *module_dict, PyObject *error_dict, const char *name_str, int code_int) { - PyObject *u = PyUnicode_FromString(name); - PyObject *v = PyLong_FromLong((long) code); - - /* Don't bother checking for errors; they'll be caught at the end - * of the module initialization function by the caller of - * initerrno(). 
- */ - if (u && v) { - /* insert in modules dict */ - PyDict_SetItem(d, u, v); - /* insert in errorcode dict */ - PyDict_SetItem(de, v, u); + PyObject *name = PyUnicode_FromString(name_str); + if (!name) { + return -1; } - Py_XDECREF(u); - Py_XDECREF(v); -} -PyDoc_STRVAR(errno__doc__, -"This module makes available standard errno system symbols.\n\ -\n\ -The value of each symbol is the corresponding integer value,\n\ -e.g., on most systems, errno.ENOENT equals the integer 2.\n\ -\n\ -The dictionary errno.errorcode maps numeric codes to symbol names,\n\ -e.g., errno.errorcode[2] could be the string 'ENOENT'.\n\ -\n\ -Symbols that are not relevant to the underlying system are not defined.\n\ -\n\ -To map error codes to error messages, use the function os.strerror(),\n\ -e.g. os.strerror(2) could return 'No such file or directory'."); + PyObject *code = PyLong_FromLong(code_int); + if (!code) { + Py_DECREF(name); + return -1; + } -static struct PyModuleDef errnomodule = { - PyModuleDef_HEAD_INIT, - "errno", - errno__doc__, - -1, - errno_methods, - NULL, - NULL, - NULL, - NULL -}; + int ret = -1; + /* insert in modules dict */ + if (PyDict_SetItem(module_dict, name, code) < 0) { + goto end; + } + /* insert in errorcode dict */ + if (PyDict_SetItem(error_dict, code, name) < 0) { + goto end; + } + ret = 0; +end: + Py_DECREF(name); + Py_DECREF(code); + return ret; +} -PyMODINIT_FUNC -PyInit_errno(void) +static int +errno_exec(PyObject *module) { - PyObject *m, *d, *de; - m = PyModule_Create(&errnomodule); - if (m == NULL) - return NULL; - d = PyModule_GetDict(m); - de = PyDict_New(); - if (!d || !de || PyDict_SetItemString(d, "errorcode", de) < 0) - return NULL; + PyObject *module_dict = PyModule_GetDict(module); + PyObject *error_dict = PyDict_New(); + if (!module_dict || !error_dict) { + return -1; + } + if (PyDict_SetItemString(module_dict, "errorcode", error_dict) < 0) { + Py_DECREF(error_dict); + return -1; + } /* Macro so I don't have to edit each and every line 
below... */ -#define inscode(d, ds, de, name, code, comment) _inscode(d, de, name, code) +#define add_errcode(name, code, comment) \ + do { \ + if (_add_errcode(module_dict, error_dict, name, code) < 0) { \ + Py_DECREF(error_dict); \ + return -1; \ + } \ + } while (0); /* * The names and comments are borrowed from linux/include/errno.h, @@ -116,820 +107,854 @@ PyInit_errno(void) */ #ifdef ENODEV - inscode(d, ds, de, "ENODEV", ENODEV, "No such device"); + add_errcode("ENODEV", ENODEV, "No such device"); #endif #ifdef ENOCSI - inscode(d, ds, de, "ENOCSI", ENOCSI, "No CSI structure available"); + add_errcode("ENOCSI", ENOCSI, "No CSI structure available"); #endif #ifdef EHOSTUNREACH - inscode(d, ds, de, "EHOSTUNREACH", EHOSTUNREACH, "No route to host"); + add_errcode("EHOSTUNREACH", EHOSTUNREACH, "No route to host"); #else #ifdef WSAEHOSTUNREACH - inscode(d, ds, de, "EHOSTUNREACH", WSAEHOSTUNREACH, "No route to host"); + add_errcode("EHOSTUNREACH", WSAEHOSTUNREACH, "No route to host"); #endif #endif #ifdef ENOMSG - inscode(d, ds, de, "ENOMSG", ENOMSG, "No message of desired type"); + add_errcode("ENOMSG", ENOMSG, "No message of desired type"); #endif #ifdef EUCLEAN - inscode(d, ds, de, "EUCLEAN", EUCLEAN, "Structure needs cleaning"); + add_errcode("EUCLEAN", EUCLEAN, "Structure needs cleaning"); #endif #ifdef EL2NSYNC - inscode(d, ds, de, "EL2NSYNC", EL2NSYNC, "Level 2 not synchronized"); + add_errcode("EL2NSYNC", EL2NSYNC, "Level 2 not synchronized"); #endif #ifdef EL2HLT - inscode(d, ds, de, "EL2HLT", EL2HLT, "Level 2 halted"); + add_errcode("EL2HLT", EL2HLT, "Level 2 halted"); #endif #ifdef ENODATA - inscode(d, ds, de, "ENODATA", ENODATA, "No data available"); + add_errcode("ENODATA", ENODATA, "No data available"); #endif #ifdef ENOTBLK - inscode(d, ds, de, "ENOTBLK", ENOTBLK, "Block device required"); + add_errcode("ENOTBLK", ENOTBLK, "Block device required"); #endif #ifdef ENOSYS - inscode(d, ds, de, "ENOSYS", ENOSYS, "Function not implemented"); + 
add_errcode("ENOSYS", ENOSYS, "Function not implemented"); #endif #ifdef EPIPE - inscode(d, ds, de, "EPIPE", EPIPE, "Broken pipe"); + add_errcode("EPIPE", EPIPE, "Broken pipe"); #endif #ifdef EINVAL - inscode(d, ds, de, "EINVAL", EINVAL, "Invalid argument"); + add_errcode("EINVAL", EINVAL, "Invalid argument"); #else #ifdef WSAEINVAL - inscode(d, ds, de, "EINVAL", WSAEINVAL, "Invalid argument"); + add_errcode("EINVAL", WSAEINVAL, "Invalid argument"); #endif #endif #ifdef EOVERFLOW - inscode(d, ds, de, "EOVERFLOW", EOVERFLOW, "Value too large for defined data type"); + add_errcode("EOVERFLOW", EOVERFLOW, "Value too large for defined data type"); #endif #ifdef EADV - inscode(d, ds, de, "EADV", EADV, "Advertise error"); + add_errcode("EADV", EADV, "Advertise error"); #endif #ifdef EINTR - inscode(d, ds, de, "EINTR", EINTR, "Interrupted system call"); + add_errcode("EINTR", EINTR, "Interrupted system call"); #else #ifdef WSAEINTR - inscode(d, ds, de, "EINTR", WSAEINTR, "Interrupted system call"); + add_errcode("EINTR", WSAEINTR, "Interrupted system call"); #endif #endif #ifdef EUSERS - inscode(d, ds, de, "EUSERS", EUSERS, "Too many users"); + add_errcode("EUSERS", EUSERS, "Too many users"); #else #ifdef WSAEUSERS - inscode(d, ds, de, "EUSERS", WSAEUSERS, "Too many users"); + add_errcode("EUSERS", WSAEUSERS, "Too many users"); #endif #endif #ifdef ENOTEMPTY - inscode(d, ds, de, "ENOTEMPTY", ENOTEMPTY, "Directory not empty"); + add_errcode("ENOTEMPTY", ENOTEMPTY, "Directory not empty"); #else #ifdef WSAENOTEMPTY - inscode(d, ds, de, "ENOTEMPTY", WSAENOTEMPTY, "Directory not empty"); + add_errcode("ENOTEMPTY", WSAENOTEMPTY, "Directory not empty"); #endif #endif #ifdef ENOBUFS - inscode(d, ds, de, "ENOBUFS", ENOBUFS, "No buffer space available"); + add_errcode("ENOBUFS", ENOBUFS, "No buffer space available"); #else #ifdef WSAENOBUFS - inscode(d, ds, de, "ENOBUFS", WSAENOBUFS, "No buffer space available"); + add_errcode("ENOBUFS", WSAENOBUFS, "No buffer space available"); 
#endif #endif #ifdef EPROTO - inscode(d, ds, de, "EPROTO", EPROTO, "Protocol error"); + add_errcode("EPROTO", EPROTO, "Protocol error"); #endif #ifdef EREMOTE - inscode(d, ds, de, "EREMOTE", EREMOTE, "Object is remote"); + add_errcode("EREMOTE", EREMOTE, "Object is remote"); #else #ifdef WSAEREMOTE - inscode(d, ds, de, "EREMOTE", WSAEREMOTE, "Object is remote"); + add_errcode("EREMOTE", WSAEREMOTE, "Object is remote"); #endif #endif #ifdef ENAVAIL - inscode(d, ds, de, "ENAVAIL", ENAVAIL, "No XENIX semaphores available"); + add_errcode("ENAVAIL", ENAVAIL, "No XENIX semaphores available"); #endif #ifdef ECHILD - inscode(d, ds, de, "ECHILD", ECHILD, "No child processes"); + add_errcode("ECHILD", ECHILD, "No child processes"); #endif #ifdef ELOOP - inscode(d, ds, de, "ELOOP", ELOOP, "Too many symbolic links encountered"); + add_errcode("ELOOP", ELOOP, "Too many symbolic links encountered"); #else #ifdef WSAELOOP - inscode(d, ds, de, "ELOOP", WSAELOOP, "Too many symbolic links encountered"); + add_errcode("ELOOP", WSAELOOP, "Too many symbolic links encountered"); #endif #endif #ifdef EXDEV - inscode(d, ds, de, "EXDEV", EXDEV, "Cross-device link"); + add_errcode("EXDEV", EXDEV, "Cross-device link"); #endif #ifdef E2BIG - inscode(d, ds, de, "E2BIG", E2BIG, "Arg list too long"); + add_errcode("E2BIG", E2BIG, "Arg list too long"); #endif #ifdef ESRCH - inscode(d, ds, de, "ESRCH", ESRCH, "No such process"); + add_errcode("ESRCH", ESRCH, "No such process"); #endif #ifdef EMSGSIZE - inscode(d, ds, de, "EMSGSIZE", EMSGSIZE, "Message too long"); + add_errcode("EMSGSIZE", EMSGSIZE, "Message too long"); #else #ifdef WSAEMSGSIZE - inscode(d, ds, de, "EMSGSIZE", WSAEMSGSIZE, "Message too long"); + add_errcode("EMSGSIZE", WSAEMSGSIZE, "Message too long"); #endif #endif #ifdef EAFNOSUPPORT - inscode(d, ds, de, "EAFNOSUPPORT", EAFNOSUPPORT, "Address family not supported by protocol"); + add_errcode("EAFNOSUPPORT", EAFNOSUPPORT, "Address family not supported by protocol"); #else #ifdef 
WSAEAFNOSUPPORT - inscode(d, ds, de, "EAFNOSUPPORT", WSAEAFNOSUPPORT, "Address family not supported by protocol"); + add_errcode("EAFNOSUPPORT", WSAEAFNOSUPPORT, "Address family not supported by protocol"); #endif #endif #ifdef EBADR - inscode(d, ds, de, "EBADR", EBADR, "Invalid request descriptor"); + add_errcode("EBADR", EBADR, "Invalid request descriptor"); #endif #ifdef EHOSTDOWN - inscode(d, ds, de, "EHOSTDOWN", EHOSTDOWN, "Host is down"); + add_errcode("EHOSTDOWN", EHOSTDOWN, "Host is down"); #else #ifdef WSAEHOSTDOWN - inscode(d, ds, de, "EHOSTDOWN", WSAEHOSTDOWN, "Host is down"); + add_errcode("EHOSTDOWN", WSAEHOSTDOWN, "Host is down"); #endif #endif #ifdef EPFNOSUPPORT - inscode(d, ds, de, "EPFNOSUPPORT", EPFNOSUPPORT, "Protocol family not supported"); + add_errcode("EPFNOSUPPORT", EPFNOSUPPORT, "Protocol family not supported"); #else #ifdef WSAEPFNOSUPPORT - inscode(d, ds, de, "EPFNOSUPPORT", WSAEPFNOSUPPORT, "Protocol family not supported"); + add_errcode("EPFNOSUPPORT", WSAEPFNOSUPPORT, "Protocol family not supported"); #endif #endif #ifdef ENOPROTOOPT - inscode(d, ds, de, "ENOPROTOOPT", ENOPROTOOPT, "Protocol not available"); + add_errcode("ENOPROTOOPT", ENOPROTOOPT, "Protocol not available"); #else #ifdef WSAENOPROTOOPT - inscode(d, ds, de, "ENOPROTOOPT", WSAENOPROTOOPT, "Protocol not available"); + add_errcode("ENOPROTOOPT", WSAENOPROTOOPT, "Protocol not available"); #endif #endif #ifdef EBUSY - inscode(d, ds, de, "EBUSY", EBUSY, "Device or resource busy"); + add_errcode("EBUSY", EBUSY, "Device or resource busy"); #endif #ifdef EWOULDBLOCK - inscode(d, ds, de, "EWOULDBLOCK", EWOULDBLOCK, "Operation would block"); + add_errcode("EWOULDBLOCK", EWOULDBLOCK, "Operation would block"); #else #ifdef WSAEWOULDBLOCK - inscode(d, ds, de, "EWOULDBLOCK", WSAEWOULDBLOCK, "Operation would block"); + add_errcode("EWOULDBLOCK", WSAEWOULDBLOCK, "Operation would block"); #endif #endif #ifdef EBADFD - inscode(d, ds, de, "EBADFD", EBADFD, "File descriptor in bad 
state"); + add_errcode("EBADFD", EBADFD, "File descriptor in bad state"); #endif #ifdef EDOTDOT - inscode(d, ds, de, "EDOTDOT", EDOTDOT, "RFS specific error"); + add_errcode("EDOTDOT", EDOTDOT, "RFS specific error"); #endif #ifdef EISCONN - inscode(d, ds, de, "EISCONN", EISCONN, "Transport endpoint is already connected"); + add_errcode("EISCONN", EISCONN, "Transport endpoint is already connected"); #else #ifdef WSAEISCONN - inscode(d, ds, de, "EISCONN", WSAEISCONN, "Transport endpoint is already connected"); + add_errcode("EISCONN", WSAEISCONN, "Transport endpoint is already connected"); #endif #endif #ifdef ENOANO - inscode(d, ds, de, "ENOANO", ENOANO, "No anode"); + add_errcode("ENOANO", ENOANO, "No anode"); #endif #ifdef ESHUTDOWN - inscode(d, ds, de, "ESHUTDOWN", ESHUTDOWN, "Cannot send after transport endpoint shutdown"); + add_errcode("ESHUTDOWN", ESHUTDOWN, "Cannot send after transport endpoint shutdown"); #else #ifdef WSAESHUTDOWN - inscode(d, ds, de, "ESHUTDOWN", WSAESHUTDOWN, "Cannot send after transport endpoint shutdown"); + add_errcode("ESHUTDOWN", WSAESHUTDOWN, "Cannot send after transport endpoint shutdown"); #endif #endif #ifdef ECHRNG - inscode(d, ds, de, "ECHRNG", ECHRNG, "Channel number out of range"); + add_errcode("ECHRNG", ECHRNG, "Channel number out of range"); #endif #ifdef ELIBBAD - inscode(d, ds, de, "ELIBBAD", ELIBBAD, "Accessing a corrupted shared library"); + add_errcode("ELIBBAD", ELIBBAD, "Accessing a corrupted shared library"); #endif #ifdef ENONET - inscode(d, ds, de, "ENONET", ENONET, "Machine is not on the network"); + add_errcode("ENONET", ENONET, "Machine is not on the network"); #endif #ifdef EBADE - inscode(d, ds, de, "EBADE", EBADE, "Invalid exchange"); + add_errcode("EBADE", EBADE, "Invalid exchange"); #endif #ifdef EBADF - inscode(d, ds, de, "EBADF", EBADF, "Bad file number"); + add_errcode("EBADF", EBADF, "Bad file number"); #else #ifdef WSAEBADF - inscode(d, ds, de, "EBADF", WSAEBADF, "Bad file number"); + 
add_errcode("EBADF", WSAEBADF, "Bad file number"); #endif #endif #ifdef EMULTIHOP - inscode(d, ds, de, "EMULTIHOP", EMULTIHOP, "Multihop attempted"); + add_errcode("EMULTIHOP", EMULTIHOP, "Multihop attempted"); #endif #ifdef EIO - inscode(d, ds, de, "EIO", EIO, "I/O error"); + add_errcode("EIO", EIO, "I/O error"); #endif #ifdef EUNATCH - inscode(d, ds, de, "EUNATCH", EUNATCH, "Protocol driver not attached"); + add_errcode("EUNATCH", EUNATCH, "Protocol driver not attached"); #endif #ifdef EPROTOTYPE - inscode(d, ds, de, "EPROTOTYPE", EPROTOTYPE, "Protocol wrong type for socket"); + add_errcode("EPROTOTYPE", EPROTOTYPE, "Protocol wrong type for socket"); #else #ifdef WSAEPROTOTYPE - inscode(d, ds, de, "EPROTOTYPE", WSAEPROTOTYPE, "Protocol wrong type for socket"); + add_errcode("EPROTOTYPE", WSAEPROTOTYPE, "Protocol wrong type for socket"); #endif #endif #ifdef ENOSPC - inscode(d, ds, de, "ENOSPC", ENOSPC, "No space left on device"); + add_errcode("ENOSPC", ENOSPC, "No space left on device"); #endif #ifdef ENOEXEC - inscode(d, ds, de, "ENOEXEC", ENOEXEC, "Exec format error"); + add_errcode("ENOEXEC", ENOEXEC, "Exec format error"); #endif #ifdef EALREADY - inscode(d, ds, de, "EALREADY", EALREADY, "Operation already in progress"); + add_errcode("EALREADY", EALREADY, "Operation already in progress"); #else #ifdef WSAEALREADY - inscode(d, ds, de, "EALREADY", WSAEALREADY, "Operation already in progress"); + add_errcode("EALREADY", WSAEALREADY, "Operation already in progress"); #endif #endif #ifdef ENETDOWN - inscode(d, ds, de, "ENETDOWN", ENETDOWN, "Network is down"); + add_errcode("ENETDOWN", ENETDOWN, "Network is down"); #else #ifdef WSAENETDOWN - inscode(d, ds, de, "ENETDOWN", WSAENETDOWN, "Network is down"); + add_errcode("ENETDOWN", WSAENETDOWN, "Network is down"); #endif #endif #ifdef ENOTNAM - inscode(d, ds, de, "ENOTNAM", ENOTNAM, "Not a XENIX named type file"); + add_errcode("ENOTNAM", ENOTNAM, "Not a XENIX named type file"); #endif #ifdef EACCES - inscode(d, ds, 
de, "EACCES", EACCES, "Permission denied"); + add_errcode("EACCES", EACCES, "Permission denied"); #else #ifdef WSAEACCES - inscode(d, ds, de, "EACCES", WSAEACCES, "Permission denied"); + add_errcode("EACCES", WSAEACCES, "Permission denied"); #endif #endif #ifdef ELNRNG - inscode(d, ds, de, "ELNRNG", ELNRNG, "Link number out of range"); + add_errcode("ELNRNG", ELNRNG, "Link number out of range"); #endif #ifdef EILSEQ - inscode(d, ds, de, "EILSEQ", EILSEQ, "Illegal byte sequence"); + add_errcode("EILSEQ", EILSEQ, "Illegal byte sequence"); #endif #ifdef ENOTDIR - inscode(d, ds, de, "ENOTDIR", ENOTDIR, "Not a directory"); + add_errcode("ENOTDIR", ENOTDIR, "Not a directory"); #endif #ifdef ENOTUNIQ - inscode(d, ds, de, "ENOTUNIQ", ENOTUNIQ, "Name not unique on network"); + add_errcode("ENOTUNIQ", ENOTUNIQ, "Name not unique on network"); #endif #ifdef EPERM - inscode(d, ds, de, "EPERM", EPERM, "Operation not permitted"); + add_errcode("EPERM", EPERM, "Operation not permitted"); #endif #ifdef EDOM - inscode(d, ds, de, "EDOM", EDOM, "Math argument out of domain of func"); + add_errcode("EDOM", EDOM, "Math argument out of domain of func"); #endif #ifdef EXFULL - inscode(d, ds, de, "EXFULL", EXFULL, "Exchange full"); + add_errcode("EXFULL", EXFULL, "Exchange full"); #endif #ifdef ECONNREFUSED - inscode(d, ds, de, "ECONNREFUSED", ECONNREFUSED, "Connection refused"); + add_errcode("ECONNREFUSED", ECONNREFUSED, "Connection refused"); #else #ifdef WSAECONNREFUSED - inscode(d, ds, de, "ECONNREFUSED", WSAECONNREFUSED, "Connection refused"); + add_errcode("ECONNREFUSED", WSAECONNREFUSED, "Connection refused"); #endif #endif #ifdef EISDIR - inscode(d, ds, de, "EISDIR", EISDIR, "Is a directory"); + add_errcode("EISDIR", EISDIR, "Is a directory"); #endif #ifdef EPROTONOSUPPORT - inscode(d, ds, de, "EPROTONOSUPPORT", EPROTONOSUPPORT, "Protocol not supported"); + add_errcode("EPROTONOSUPPORT", EPROTONOSUPPORT, "Protocol not supported"); #else #ifdef WSAEPROTONOSUPPORT - inscode(d, ds, 
de, "EPROTONOSUPPORT", WSAEPROTONOSUPPORT, "Protocol not supported"); + add_errcode("EPROTONOSUPPORT", WSAEPROTONOSUPPORT, "Protocol not supported"); #endif #endif #ifdef EROFS - inscode(d, ds, de, "EROFS", EROFS, "Read-only file system"); + add_errcode("EROFS", EROFS, "Read-only file system"); #endif #ifdef EADDRNOTAVAIL - inscode(d, ds, de, "EADDRNOTAVAIL", EADDRNOTAVAIL, "Cannot assign requested address"); + add_errcode("EADDRNOTAVAIL", EADDRNOTAVAIL, "Cannot assign requested address"); #else #ifdef WSAEADDRNOTAVAIL - inscode(d, ds, de, "EADDRNOTAVAIL", WSAEADDRNOTAVAIL, "Cannot assign requested address"); + add_errcode("EADDRNOTAVAIL", WSAEADDRNOTAVAIL, "Cannot assign requested address"); #endif #endif #ifdef EIDRM - inscode(d, ds, de, "EIDRM", EIDRM, "Identifier removed"); + add_errcode("EIDRM", EIDRM, "Identifier removed"); #endif #ifdef ECOMM - inscode(d, ds, de, "ECOMM", ECOMM, "Communication error on send"); + add_errcode("ECOMM", ECOMM, "Communication error on send"); #endif #ifdef ESRMNT - inscode(d, ds, de, "ESRMNT", ESRMNT, "Srmount error"); + add_errcode("ESRMNT", ESRMNT, "Srmount error"); #endif #ifdef EREMOTEIO - inscode(d, ds, de, "EREMOTEIO", EREMOTEIO, "Remote I/O error"); + add_errcode("EREMOTEIO", EREMOTEIO, "Remote I/O error"); #endif #ifdef EL3RST - inscode(d, ds, de, "EL3RST", EL3RST, "Level 3 reset"); + add_errcode("EL3RST", EL3RST, "Level 3 reset"); #endif #ifdef EBADMSG - inscode(d, ds, de, "EBADMSG", EBADMSG, "Not a data message"); + add_errcode("EBADMSG", EBADMSG, "Not a data message"); #endif #ifdef ENFILE - inscode(d, ds, de, "ENFILE", ENFILE, "File table overflow"); + add_errcode("ENFILE", ENFILE, "File table overflow"); #endif #ifdef ELIBMAX - inscode(d, ds, de, "ELIBMAX", ELIBMAX, "Attempting to link in too many shared libraries"); + add_errcode("ELIBMAX", ELIBMAX, "Attempting to link in too many shared libraries"); #endif #ifdef ESPIPE - inscode(d, ds, de, "ESPIPE", ESPIPE, "Illegal seek"); + add_errcode("ESPIPE", ESPIPE, "Illegal 
seek"); #endif #ifdef ENOLINK - inscode(d, ds, de, "ENOLINK", ENOLINK, "Link has been severed"); + add_errcode("ENOLINK", ENOLINK, "Link has been severed"); #endif #ifdef ENETRESET - inscode(d, ds, de, "ENETRESET", ENETRESET, "Network dropped connection because of reset"); + add_errcode("ENETRESET", ENETRESET, "Network dropped connection because of reset"); #else #ifdef WSAENETRESET - inscode(d, ds, de, "ENETRESET", WSAENETRESET, "Network dropped connection because of reset"); + add_errcode("ENETRESET", WSAENETRESET, "Network dropped connection because of reset"); #endif #endif #ifdef ETIMEDOUT - inscode(d, ds, de, "ETIMEDOUT", ETIMEDOUT, "Connection timed out"); + add_errcode("ETIMEDOUT", ETIMEDOUT, "Connection timed out"); #else #ifdef WSAETIMEDOUT - inscode(d, ds, de, "ETIMEDOUT", WSAETIMEDOUT, "Connection timed out"); + add_errcode("ETIMEDOUT", WSAETIMEDOUT, "Connection timed out"); #endif #endif #ifdef ENOENT - inscode(d, ds, de, "ENOENT", ENOENT, "No such file or directory"); + add_errcode("ENOENT", ENOENT, "No such file or directory"); #endif #ifdef EEXIST - inscode(d, ds, de, "EEXIST", EEXIST, "File exists"); + add_errcode("EEXIST", EEXIST, "File exists"); #endif #ifdef EDQUOT - inscode(d, ds, de, "EDQUOT", EDQUOT, "Quota exceeded"); + add_errcode("EDQUOT", EDQUOT, "Quota exceeded"); #else #ifdef WSAEDQUOT - inscode(d, ds, de, "EDQUOT", WSAEDQUOT, "Quota exceeded"); + add_errcode("EDQUOT", WSAEDQUOT, "Quota exceeded"); #endif #endif #ifdef ENOSTR - inscode(d, ds, de, "ENOSTR", ENOSTR, "Device not a stream"); + add_errcode("ENOSTR", ENOSTR, "Device not a stream"); #endif #ifdef EBADSLT - inscode(d, ds, de, "EBADSLT", EBADSLT, "Invalid slot"); + add_errcode("EBADSLT", EBADSLT, "Invalid slot"); #endif #ifdef EBADRQC - inscode(d, ds, de, "EBADRQC", EBADRQC, "Invalid request code"); + add_errcode("EBADRQC", EBADRQC, "Invalid request code"); #endif #ifdef ELIBACC - inscode(d, ds, de, "ELIBACC", ELIBACC, "Can not access a needed shared library"); + 
add_errcode("ELIBACC", ELIBACC, "Can not access a needed shared library"); #endif #ifdef EFAULT - inscode(d, ds, de, "EFAULT", EFAULT, "Bad address"); + add_errcode("EFAULT", EFAULT, "Bad address"); #else #ifdef WSAEFAULT - inscode(d, ds, de, "EFAULT", WSAEFAULT, "Bad address"); + add_errcode("EFAULT", WSAEFAULT, "Bad address"); #endif #endif #ifdef EFBIG - inscode(d, ds, de, "EFBIG", EFBIG, "File too large"); + add_errcode("EFBIG", EFBIG, "File too large"); #endif #ifdef EDEADLK - inscode(d, ds, de, "EDEADLK", EDEADLK, "Resource deadlock would occur"); + add_errcode("EDEADLK", EDEADLK, "Resource deadlock would occur"); #endif #ifdef ENOTCONN - inscode(d, ds, de, "ENOTCONN", ENOTCONN, "Transport endpoint is not connected"); + add_errcode("ENOTCONN", ENOTCONN, "Transport endpoint is not connected"); #else #ifdef WSAENOTCONN - inscode(d, ds, de, "ENOTCONN", WSAENOTCONN, "Transport endpoint is not connected"); + add_errcode("ENOTCONN", WSAENOTCONN, "Transport endpoint is not connected"); #endif #endif #ifdef EDESTADDRREQ - inscode(d, ds, de, "EDESTADDRREQ", EDESTADDRREQ, "Destination address required"); + add_errcode("EDESTADDRREQ", EDESTADDRREQ, "Destination address required"); #else #ifdef WSAEDESTADDRREQ - inscode(d, ds, de, "EDESTADDRREQ", WSAEDESTADDRREQ, "Destination address required"); + add_errcode("EDESTADDRREQ", WSAEDESTADDRREQ, "Destination address required"); #endif #endif #ifdef ELIBSCN - inscode(d, ds, de, "ELIBSCN", ELIBSCN, ".lib section in a.out corrupted"); + add_errcode("ELIBSCN", ELIBSCN, ".lib section in a.out corrupted"); #endif #ifdef ENOLCK - inscode(d, ds, de, "ENOLCK", ENOLCK, "No record locks available"); + add_errcode("ENOLCK", ENOLCK, "No record locks available"); #endif #ifdef EISNAM - inscode(d, ds, de, "EISNAM", EISNAM, "Is a named type file"); + add_errcode("EISNAM", EISNAM, "Is a named type file"); #endif #ifdef ECONNABORTED - inscode(d, ds, de, "ECONNABORTED", ECONNABORTED, "Software caused connection abort"); + 
add_errcode("ECONNABORTED", ECONNABORTED, "Software caused connection abort"); #else #ifdef WSAECONNABORTED - inscode(d, ds, de, "ECONNABORTED", WSAECONNABORTED, "Software caused connection abort"); + add_errcode("ECONNABORTED", WSAECONNABORTED, "Software caused connection abort"); #endif #endif #ifdef ENETUNREACH - inscode(d, ds, de, "ENETUNREACH", ENETUNREACH, "Network is unreachable"); + add_errcode("ENETUNREACH", ENETUNREACH, "Network is unreachable"); #else #ifdef WSAENETUNREACH - inscode(d, ds, de, "ENETUNREACH", WSAENETUNREACH, "Network is unreachable"); + add_errcode("ENETUNREACH", WSAENETUNREACH, "Network is unreachable"); #endif #endif #ifdef ESTALE - inscode(d, ds, de, "ESTALE", ESTALE, "Stale NFS file handle"); + add_errcode("ESTALE", ESTALE, "Stale NFS file handle"); #else #ifdef WSAESTALE - inscode(d, ds, de, "ESTALE", WSAESTALE, "Stale NFS file handle"); + add_errcode("ESTALE", WSAESTALE, "Stale NFS file handle"); #endif #endif #ifdef ENOSR - inscode(d, ds, de, "ENOSR", ENOSR, "Out of streams resources"); + add_errcode("ENOSR", ENOSR, "Out of streams resources"); #endif #ifdef ENOMEM - inscode(d, ds, de, "ENOMEM", ENOMEM, "Out of memory"); + add_errcode("ENOMEM", ENOMEM, "Out of memory"); #endif #ifdef ENOTSOCK - inscode(d, ds, de, "ENOTSOCK", ENOTSOCK, "Socket operation on non-socket"); + add_errcode("ENOTSOCK", ENOTSOCK, "Socket operation on non-socket"); #else #ifdef WSAENOTSOCK - inscode(d, ds, de, "ENOTSOCK", WSAENOTSOCK, "Socket operation on non-socket"); + add_errcode("ENOTSOCK", WSAENOTSOCK, "Socket operation on non-socket"); #endif #endif #ifdef ESTRPIPE - inscode(d, ds, de, "ESTRPIPE", ESTRPIPE, "Streams pipe error"); + add_errcode("ESTRPIPE", ESTRPIPE, "Streams pipe error"); #endif #ifdef EMLINK - inscode(d, ds, de, "EMLINK", EMLINK, "Too many links"); + add_errcode("EMLINK", EMLINK, "Too many links"); #endif #ifdef ERANGE - inscode(d, ds, de, "ERANGE", ERANGE, "Math result not representable"); + add_errcode("ERANGE", ERANGE, "Math result 
not representable"); #endif #ifdef ELIBEXEC - inscode(d, ds, de, "ELIBEXEC", ELIBEXEC, "Cannot exec a shared library directly"); + add_errcode("ELIBEXEC", ELIBEXEC, "Cannot exec a shared library directly"); #endif #ifdef EL3HLT - inscode(d, ds, de, "EL3HLT", EL3HLT, "Level 3 halted"); + add_errcode("EL3HLT", EL3HLT, "Level 3 halted"); #endif #ifdef ECONNRESET - inscode(d, ds, de, "ECONNRESET", ECONNRESET, "Connection reset by peer"); + add_errcode("ECONNRESET", ECONNRESET, "Connection reset by peer"); #else #ifdef WSAECONNRESET - inscode(d, ds, de, "ECONNRESET", WSAECONNRESET, "Connection reset by peer"); + add_errcode("ECONNRESET", WSAECONNRESET, "Connection reset by peer"); #endif #endif #ifdef EADDRINUSE - inscode(d, ds, de, "EADDRINUSE", EADDRINUSE, "Address already in use"); + add_errcode("EADDRINUSE", EADDRINUSE, "Address already in use"); #else #ifdef WSAEADDRINUSE - inscode(d, ds, de, "EADDRINUSE", WSAEADDRINUSE, "Address already in use"); + add_errcode("EADDRINUSE", WSAEADDRINUSE, "Address already in use"); #endif #endif #ifdef EOPNOTSUPP - inscode(d, ds, de, "EOPNOTSUPP", EOPNOTSUPP, "Operation not supported on transport endpoint"); + add_errcode("EOPNOTSUPP", EOPNOTSUPP, "Operation not supported on transport endpoint"); #else #ifdef WSAEOPNOTSUPP - inscode(d, ds, de, "EOPNOTSUPP", WSAEOPNOTSUPP, "Operation not supported on transport endpoint"); + add_errcode("EOPNOTSUPP", WSAEOPNOTSUPP, "Operation not supported on transport endpoint"); #endif #endif #ifdef EREMCHG - inscode(d, ds, de, "EREMCHG", EREMCHG, "Remote address changed"); + add_errcode("EREMCHG", EREMCHG, "Remote address changed"); #endif #ifdef EAGAIN - inscode(d, ds, de, "EAGAIN", EAGAIN, "Try again"); + add_errcode("EAGAIN", EAGAIN, "Try again"); #endif #ifdef ENAMETOOLONG - inscode(d, ds, de, "ENAMETOOLONG", ENAMETOOLONG, "File name too long"); + add_errcode("ENAMETOOLONG", ENAMETOOLONG, "File name too long"); #else #ifdef WSAENAMETOOLONG - inscode(d, ds, de, "ENAMETOOLONG", WSAENAMETOOLONG, 
"File name too long"); + add_errcode("ENAMETOOLONG", WSAENAMETOOLONG, "File name too long"); #endif #endif #ifdef ENOTTY - inscode(d, ds, de, "ENOTTY", ENOTTY, "Not a typewriter"); + add_errcode("ENOTTY", ENOTTY, "Not a typewriter"); #endif #ifdef ERESTART - inscode(d, ds, de, "ERESTART", ERESTART, "Interrupted system call should be restarted"); + add_errcode("ERESTART", ERESTART, "Interrupted system call should be restarted"); #endif #ifdef ESOCKTNOSUPPORT - inscode(d, ds, de, "ESOCKTNOSUPPORT", ESOCKTNOSUPPORT, "Socket type not supported"); + add_errcode("ESOCKTNOSUPPORT", ESOCKTNOSUPPORT, "Socket type not supported"); #else #ifdef WSAESOCKTNOSUPPORT - inscode(d, ds, de, "ESOCKTNOSUPPORT", WSAESOCKTNOSUPPORT, "Socket type not supported"); + add_errcode("ESOCKTNOSUPPORT", WSAESOCKTNOSUPPORT, "Socket type not supported"); #endif #endif #ifdef ETIME - inscode(d, ds, de, "ETIME", ETIME, "Timer expired"); + add_errcode("ETIME", ETIME, "Timer expired"); #endif #ifdef EBFONT - inscode(d, ds, de, "EBFONT", EBFONT, "Bad font file format"); + add_errcode("EBFONT", EBFONT, "Bad font file format"); #endif #ifdef EDEADLOCK - inscode(d, ds, de, "EDEADLOCK", EDEADLOCK, "Error EDEADLOCK"); + add_errcode("EDEADLOCK", EDEADLOCK, "Error EDEADLOCK"); #endif #ifdef ETOOMANYREFS - inscode(d, ds, de, "ETOOMANYREFS", ETOOMANYREFS, "Too many references: cannot splice"); + add_errcode("ETOOMANYREFS", ETOOMANYREFS, "Too many references: cannot splice"); #else #ifdef WSAETOOMANYREFS - inscode(d, ds, de, "ETOOMANYREFS", WSAETOOMANYREFS, "Too many references: cannot splice"); + add_errcode("ETOOMANYREFS", WSAETOOMANYREFS, "Too many references: cannot splice"); #endif #endif #ifdef EMFILE - inscode(d, ds, de, "EMFILE", EMFILE, "Too many open files"); + add_errcode("EMFILE", EMFILE, "Too many open files"); #else #ifdef WSAEMFILE - inscode(d, ds, de, "EMFILE", WSAEMFILE, "Too many open files"); + add_errcode("EMFILE", WSAEMFILE, "Too many open files"); #endif #endif #ifdef ETXTBSY - inscode(d, 
ds, de, "ETXTBSY", ETXTBSY, "Text file busy"); + add_errcode("ETXTBSY", ETXTBSY, "Text file busy"); #endif #ifdef EINPROGRESS - inscode(d, ds, de, "EINPROGRESS", EINPROGRESS, "Operation now in progress"); + add_errcode("EINPROGRESS", EINPROGRESS, "Operation now in progress"); #else #ifdef WSAEINPROGRESS - inscode(d, ds, de, "EINPROGRESS", WSAEINPROGRESS, "Operation now in progress"); + add_errcode("EINPROGRESS", WSAEINPROGRESS, "Operation now in progress"); #endif #endif #ifdef ENXIO - inscode(d, ds, de, "ENXIO", ENXIO, "No such device or address"); + add_errcode("ENXIO", ENXIO, "No such device or address"); #endif #ifdef ENOPKG - inscode(d, ds, de, "ENOPKG", ENOPKG, "Package not installed"); + add_errcode("ENOPKG", ENOPKG, "Package not installed"); #endif #ifdef WSASY - inscode(d, ds, de, "WSASY", WSASY, "Error WSASY"); + add_errcode("WSASY", WSASY, "Error WSASY"); #endif #ifdef WSAEHOSTDOWN - inscode(d, ds, de, "WSAEHOSTDOWN", WSAEHOSTDOWN, "Host is down"); + add_errcode("WSAEHOSTDOWN", WSAEHOSTDOWN, "Host is down"); #endif #ifdef WSAENETDOWN - inscode(d, ds, de, "WSAENETDOWN", WSAENETDOWN, "Network is down"); + add_errcode("WSAENETDOWN", WSAENETDOWN, "Network is down"); #endif #ifdef WSAENOTSOCK - inscode(d, ds, de, "WSAENOTSOCK", WSAENOTSOCK, "Socket operation on non-socket"); + add_errcode("WSAENOTSOCK", WSAENOTSOCK, "Socket operation on non-socket"); #endif #ifdef WSAEHOSTUNREACH - inscode(d, ds, de, "WSAEHOSTUNREACH", WSAEHOSTUNREACH, "No route to host"); + add_errcode("WSAEHOSTUNREACH", WSAEHOSTUNREACH, "No route to host"); #endif #ifdef WSAELOOP - inscode(d, ds, de, "WSAELOOP", WSAELOOP, "Too many symbolic links encountered"); + add_errcode("WSAELOOP", WSAELOOP, "Too many symbolic links encountered"); #endif #ifdef WSAEMFILE - inscode(d, ds, de, "WSAEMFILE", WSAEMFILE, "Too many open files"); + add_errcode("WSAEMFILE", WSAEMFILE, "Too many open files"); #endif #ifdef WSAESTALE - inscode(d, ds, de, "WSAESTALE", WSAESTALE, "Stale NFS file handle"); + 
add_errcode("WSAESTALE", WSAESTALE, "Stale NFS file handle"); #endif #ifdef WSAVERNOTSUPPORTED - inscode(d, ds, de, "WSAVERNOTSUPPORTED", WSAVERNOTSUPPORTED, "Error WSAVERNOTSUPPORTED"); + add_errcode("WSAVERNOTSUPPORTED", WSAVERNOTSUPPORTED, "Error WSAVERNOTSUPPORTED"); #endif #ifdef WSAENETUNREACH - inscode(d, ds, de, "WSAENETUNREACH", WSAENETUNREACH, "Network is unreachable"); + add_errcode("WSAENETUNREACH", WSAENETUNREACH, "Network is unreachable"); #endif #ifdef WSAEPROCLIM - inscode(d, ds, de, "WSAEPROCLIM", WSAEPROCLIM, "Error WSAEPROCLIM"); + add_errcode("WSAEPROCLIM", WSAEPROCLIM, "Error WSAEPROCLIM"); #endif #ifdef WSAEFAULT - inscode(d, ds, de, "WSAEFAULT", WSAEFAULT, "Bad address"); + add_errcode("WSAEFAULT", WSAEFAULT, "Bad address"); #endif #ifdef WSANOTINITIALISED - inscode(d, ds, de, "WSANOTINITIALISED", WSANOTINITIALISED, "Error WSANOTINITIALISED"); + add_errcode("WSANOTINITIALISED", WSANOTINITIALISED, "Error WSANOTINITIALISED"); #endif #ifdef WSAEUSERS - inscode(d, ds, de, "WSAEUSERS", WSAEUSERS, "Too many users"); + add_errcode("WSAEUSERS", WSAEUSERS, "Too many users"); #endif #ifdef WSAMAKEASYNCREPL - inscode(d, ds, de, "WSAMAKEASYNCREPL", WSAMAKEASYNCREPL, "Error WSAMAKEASYNCREPL"); + add_errcode("WSAMAKEASYNCREPL", WSAMAKEASYNCREPL, "Error WSAMAKEASYNCREPL"); #endif #ifdef WSAENOPROTOOPT - inscode(d, ds, de, "WSAENOPROTOOPT", WSAENOPROTOOPT, "Protocol not available"); + add_errcode("WSAENOPROTOOPT", WSAENOPROTOOPT, "Protocol not available"); #endif #ifdef WSAECONNABORTED - inscode(d, ds, de, "WSAECONNABORTED", WSAECONNABORTED, "Software caused connection abort"); + add_errcode("WSAECONNABORTED", WSAECONNABORTED, "Software caused connection abort"); #endif #ifdef WSAENAMETOOLONG - inscode(d, ds, de, "WSAENAMETOOLONG", WSAENAMETOOLONG, "File name too long"); + add_errcode("WSAENAMETOOLONG", WSAENAMETOOLONG, "File name too long"); #endif #ifdef WSAENOTEMPTY - inscode(d, ds, de, "WSAENOTEMPTY", WSAENOTEMPTY, "Directory not empty"); + 
add_errcode("WSAENOTEMPTY", WSAENOTEMPTY, "Directory not empty"); #endif #ifdef WSAESHUTDOWN - inscode(d, ds, de, "WSAESHUTDOWN", WSAESHUTDOWN, "Cannot send after transport endpoint shutdown"); + add_errcode("WSAESHUTDOWN", WSAESHUTDOWN, "Cannot send after transport endpoint shutdown"); #endif #ifdef WSAEAFNOSUPPORT - inscode(d, ds, de, "WSAEAFNOSUPPORT", WSAEAFNOSUPPORT, "Address family not supported by protocol"); + add_errcode("WSAEAFNOSUPPORT", WSAEAFNOSUPPORT, "Address family not supported by protocol"); #endif #ifdef WSAETOOMANYREFS - inscode(d, ds, de, "WSAETOOMANYREFS", WSAETOOMANYREFS, "Too many references: cannot splice"); + add_errcode("WSAETOOMANYREFS", WSAETOOMANYREFS, "Too many references: cannot splice"); #endif #ifdef WSAEACCES - inscode(d, ds, de, "WSAEACCES", WSAEACCES, "Permission denied"); + add_errcode("WSAEACCES", WSAEACCES, "Permission denied"); #endif #ifdef WSATR - inscode(d, ds, de, "WSATR", WSATR, "Error WSATR"); + add_errcode("WSATR", WSATR, "Error WSATR"); #endif #ifdef WSABASEERR - inscode(d, ds, de, "WSABASEERR", WSABASEERR, "Error WSABASEERR"); + add_errcode("WSABASEERR", WSABASEERR, "Error WSABASEERR"); #endif #ifdef WSADESCRIPTIO - inscode(d, ds, de, "WSADESCRIPTIO", WSADESCRIPTIO, "Error WSADESCRIPTIO"); + add_errcode("WSADESCRIPTIO", WSADESCRIPTIO, "Error WSADESCRIPTIO"); #endif #ifdef WSAEMSGSIZE - inscode(d, ds, de, "WSAEMSGSIZE", WSAEMSGSIZE, "Message too long"); + add_errcode("WSAEMSGSIZE", WSAEMSGSIZE, "Message too long"); #endif #ifdef WSAEBADF - inscode(d, ds, de, "WSAEBADF", WSAEBADF, "Bad file number"); + add_errcode("WSAEBADF", WSAEBADF, "Bad file number"); #endif #ifdef WSAECONNRESET - inscode(d, ds, de, "WSAECONNRESET", WSAECONNRESET, "Connection reset by peer"); + add_errcode("WSAECONNRESET", WSAECONNRESET, "Connection reset by peer"); #endif #ifdef WSAGETSELECTERRO - inscode(d, ds, de, "WSAGETSELECTERRO", WSAGETSELECTERRO, "Error WSAGETSELECTERRO"); + add_errcode("WSAGETSELECTERRO", WSAGETSELECTERRO, "Error 
WSAGETSELECTERRO"); #endif #ifdef WSAETIMEDOUT - inscode(d, ds, de, "WSAETIMEDOUT", WSAETIMEDOUT, "Connection timed out"); + add_errcode("WSAETIMEDOUT", WSAETIMEDOUT, "Connection timed out"); #endif #ifdef WSAENOBUFS - inscode(d, ds, de, "WSAENOBUFS", WSAENOBUFS, "No buffer space available"); + add_errcode("WSAENOBUFS", WSAENOBUFS, "No buffer space available"); #endif #ifdef WSAEDISCON - inscode(d, ds, de, "WSAEDISCON", WSAEDISCON, "Error WSAEDISCON"); + add_errcode("WSAEDISCON", WSAEDISCON, "Error WSAEDISCON"); #endif #ifdef WSAEINTR - inscode(d, ds, de, "WSAEINTR", WSAEINTR, "Interrupted system call"); + add_errcode("WSAEINTR", WSAEINTR, "Interrupted system call"); #endif #ifdef WSAEPROTOTYPE - inscode(d, ds, de, "WSAEPROTOTYPE", WSAEPROTOTYPE, "Protocol wrong type for socket"); + add_errcode("WSAEPROTOTYPE", WSAEPROTOTYPE, "Protocol wrong type for socket"); #endif #ifdef WSAHOS - inscode(d, ds, de, "WSAHOS", WSAHOS, "Error WSAHOS"); + add_errcode("WSAHOS", WSAHOS, "Error WSAHOS"); #endif #ifdef WSAEADDRINUSE - inscode(d, ds, de, "WSAEADDRINUSE", WSAEADDRINUSE, "Address already in use"); + add_errcode("WSAEADDRINUSE", WSAEADDRINUSE, "Address already in use"); #endif #ifdef WSAEADDRNOTAVAIL - inscode(d, ds, de, "WSAEADDRNOTAVAIL", WSAEADDRNOTAVAIL, "Cannot assign requested address"); + add_errcode("WSAEADDRNOTAVAIL", WSAEADDRNOTAVAIL, "Cannot assign requested address"); #endif #ifdef WSAEALREADY - inscode(d, ds, de, "WSAEALREADY", WSAEALREADY, "Operation already in progress"); + add_errcode("WSAEALREADY", WSAEALREADY, "Operation already in progress"); #endif #ifdef WSAEPROTONOSUPPORT - inscode(d, ds, de, "WSAEPROTONOSUPPORT", WSAEPROTONOSUPPORT, "Protocol not supported"); + add_errcode("WSAEPROTONOSUPPORT", WSAEPROTONOSUPPORT, "Protocol not supported"); #endif #ifdef WSASYSNOTREADY - inscode(d, ds, de, "WSASYSNOTREADY", WSASYSNOTREADY, "Error WSASYSNOTREADY"); + add_errcode("WSASYSNOTREADY", WSASYSNOTREADY, "Error WSASYSNOTREADY"); #endif #ifdef WSAEWOULDBLOCK - 
inscode(d, ds, de, "WSAEWOULDBLOCK", WSAEWOULDBLOCK, "Operation would block"); + add_errcode("WSAEWOULDBLOCK", WSAEWOULDBLOCK, "Operation would block"); #endif #ifdef WSAEPFNOSUPPORT - inscode(d, ds, de, "WSAEPFNOSUPPORT", WSAEPFNOSUPPORT, "Protocol family not supported"); + add_errcode("WSAEPFNOSUPPORT", WSAEPFNOSUPPORT, "Protocol family not supported"); #endif #ifdef WSAEOPNOTSUPP - inscode(d, ds, de, "WSAEOPNOTSUPP", WSAEOPNOTSUPP, "Operation not supported on transport endpoint"); + add_errcode("WSAEOPNOTSUPP", WSAEOPNOTSUPP, "Operation not supported on transport endpoint"); #endif #ifdef WSAEISCONN - inscode(d, ds, de, "WSAEISCONN", WSAEISCONN, "Transport endpoint is already connected"); + add_errcode("WSAEISCONN", WSAEISCONN, "Transport endpoint is already connected"); #endif #ifdef WSAEDQUOT - inscode(d, ds, de, "WSAEDQUOT", WSAEDQUOT, "Quota exceeded"); + add_errcode("WSAEDQUOT", WSAEDQUOT, "Quota exceeded"); #endif #ifdef WSAENOTCONN - inscode(d, ds, de, "WSAENOTCONN", WSAENOTCONN, "Transport endpoint is not connected"); + add_errcode("WSAENOTCONN", WSAENOTCONN, "Transport endpoint is not connected"); #endif #ifdef WSAEREMOTE - inscode(d, ds, de, "WSAEREMOTE", WSAEREMOTE, "Object is remote"); + add_errcode("WSAEREMOTE", WSAEREMOTE, "Object is remote"); #endif #ifdef WSAEINVAL - inscode(d, ds, de, "WSAEINVAL", WSAEINVAL, "Invalid argument"); + add_errcode("WSAEINVAL", WSAEINVAL, "Invalid argument"); #endif #ifdef WSAEINPROGRESS - inscode(d, ds, de, "WSAEINPROGRESS", WSAEINPROGRESS, "Operation now in progress"); + add_errcode("WSAEINPROGRESS", WSAEINPROGRESS, "Operation now in progress"); #endif #ifdef WSAGETSELECTEVEN - inscode(d, ds, de, "WSAGETSELECTEVEN", WSAGETSELECTEVEN, "Error WSAGETSELECTEVEN"); + add_errcode("WSAGETSELECTEVEN", WSAGETSELECTEVEN, "Error WSAGETSELECTEVEN"); #endif #ifdef WSAESOCKTNOSUPPORT - inscode(d, ds, de, "WSAESOCKTNOSUPPORT", WSAESOCKTNOSUPPORT, "Socket type not supported"); + add_errcode("WSAESOCKTNOSUPPORT", WSAESOCKTNOSUPPORT, 
"Socket type not supported"); #endif #ifdef WSAGETASYNCERRO - inscode(d, ds, de, "WSAGETASYNCERRO", WSAGETASYNCERRO, "Error WSAGETASYNCERRO"); + add_errcode("WSAGETASYNCERRO", WSAGETASYNCERRO, "Error WSAGETASYNCERRO"); #endif #ifdef WSAMAKESELECTREPL - inscode(d, ds, de, "WSAMAKESELECTREPL", WSAMAKESELECTREPL, "Error WSAMAKESELECTREPL"); + add_errcode("WSAMAKESELECTREPL", WSAMAKESELECTREPL, "Error WSAMAKESELECTREPL"); #endif #ifdef WSAGETASYNCBUFLE - inscode(d, ds, de, "WSAGETASYNCBUFLE", WSAGETASYNCBUFLE, "Error WSAGETASYNCBUFLE"); + add_errcode("WSAGETASYNCBUFLE", WSAGETASYNCBUFLE, "Error WSAGETASYNCBUFLE"); #endif #ifdef WSAEDESTADDRREQ - inscode(d, ds, de, "WSAEDESTADDRREQ", WSAEDESTADDRREQ, "Destination address required"); + add_errcode("WSAEDESTADDRREQ", WSAEDESTADDRREQ, "Destination address required"); #endif #ifdef WSAECONNREFUSED - inscode(d, ds, de, "WSAECONNREFUSED", WSAECONNREFUSED, "Connection refused"); + add_errcode("WSAECONNREFUSED", WSAECONNREFUSED, "Connection refused"); #endif #ifdef WSAENETRESET - inscode(d, ds, de, "WSAENETRESET", WSAENETRESET, "Network dropped connection because of reset"); + add_errcode("WSAENETRESET", WSAENETRESET, "Network dropped connection because of reset"); #endif #ifdef WSAN - inscode(d, ds, de, "WSAN", WSAN, "Error WSAN"); + add_errcode("WSAN", WSAN, "Error WSAN"); #endif #ifdef ENOMEDIUM - inscode(d, ds, de, "ENOMEDIUM", ENOMEDIUM, "No medium found"); + add_errcode("ENOMEDIUM", ENOMEDIUM, "No medium found"); #endif #ifdef EMEDIUMTYPE - inscode(d, ds, de, "EMEDIUMTYPE", EMEDIUMTYPE, "Wrong medium type"); + add_errcode("EMEDIUMTYPE", EMEDIUMTYPE, "Wrong medium type"); #endif #ifdef ECANCELED - inscode(d, ds, de, "ECANCELED", ECANCELED, "Operation Canceled"); + add_errcode("ECANCELED", ECANCELED, "Operation Canceled"); #endif #ifdef ENOKEY - inscode(d, ds, de, "ENOKEY", ENOKEY, "Required key not available"); + add_errcode("ENOKEY", ENOKEY, "Required key not available"); #endif #ifdef EKEYEXPIRED - inscode(d, ds, de, 
"EKEYEXPIRED", EKEYEXPIRED, "Key has expired"); + add_errcode("EKEYEXPIRED", EKEYEXPIRED, "Key has expired"); #endif #ifdef EKEYREVOKED - inscode(d, ds, de, "EKEYREVOKED", EKEYREVOKED, "Key has been revoked"); + add_errcode("EKEYREVOKED", EKEYREVOKED, "Key has been revoked"); #endif #ifdef EKEYREJECTED - inscode(d, ds, de, "EKEYREJECTED", EKEYREJECTED, "Key was rejected by service"); + add_errcode("EKEYREJECTED", EKEYREJECTED, "Key was rejected by service"); #endif #ifdef EOWNERDEAD - inscode(d, ds, de, "EOWNERDEAD", EOWNERDEAD, "Owner died"); + add_errcode("EOWNERDEAD", EOWNERDEAD, "Owner died"); #endif #ifdef ENOTRECOVERABLE - inscode(d, ds, de, "ENOTRECOVERABLE", ENOTRECOVERABLE, "State not recoverable"); + add_errcode("ENOTRECOVERABLE", ENOTRECOVERABLE, "State not recoverable"); #endif #ifdef ERFKILL - inscode(d, ds, de, "ERFKILL", ERFKILL, "Operation not possible due to RF-kill"); + add_errcode("ERFKILL", ERFKILL, "Operation not possible due to RF-kill"); #endif /* Solaris-specific errnos */ #ifdef ECANCELED - inscode(d, ds, de, "ECANCELED", ECANCELED, "Operation canceled"); + add_errcode("ECANCELED", ECANCELED, "Operation canceled"); #endif #ifdef ENOTSUP - inscode(d, ds, de, "ENOTSUP", ENOTSUP, "Operation not supported"); + add_errcode("ENOTSUP", ENOTSUP, "Operation not supported"); #endif #ifdef EOWNERDEAD - inscode(d, ds, de, "EOWNERDEAD", EOWNERDEAD, "Process died with the lock"); + add_errcode("EOWNERDEAD", EOWNERDEAD, "Process died with the lock"); #endif #ifdef ENOTRECOVERABLE - inscode(d, ds, de, "ENOTRECOVERABLE", ENOTRECOVERABLE, "Lock is not recoverable"); + add_errcode("ENOTRECOVERABLE", ENOTRECOVERABLE, "Lock is not recoverable"); #endif #ifdef ELOCKUNMAPPED - inscode(d, ds, de, "ELOCKUNMAPPED", ELOCKUNMAPPED, "Locked lock was unmapped"); + add_errcode("ELOCKUNMAPPED", ELOCKUNMAPPED, "Locked lock was unmapped"); #endif #ifdef ENOTACTIVE - inscode(d, ds, de, "ENOTACTIVE", ENOTACTIVE, "Facility is not active"); + add_errcode("ENOTACTIVE", 
ENOTACTIVE, "Facility is not active"); #endif /* MacOSX specific errnos */ #ifdef EAUTH - inscode(d, ds, de, "EAUTH", EAUTH, "Authentication error"); + add_errcode("EAUTH", EAUTH, "Authentication error"); #endif #ifdef EBADARCH - inscode(d, ds, de, "EBADARCH", EBADARCH, "Bad CPU type in executable"); + add_errcode("EBADARCH", EBADARCH, "Bad CPU type in executable"); #endif #ifdef EBADEXEC - inscode(d, ds, de, "EBADEXEC", EBADEXEC, "Bad executable (or shared library)"); + add_errcode("EBADEXEC", EBADEXEC, "Bad executable (or shared library)"); #endif #ifdef EBADMACHO - inscode(d, ds, de, "EBADMACHO", EBADMACHO, "Malformed Mach-o file"); + add_errcode("EBADMACHO", EBADMACHO, "Malformed Mach-o file"); #endif #ifdef EBADRPC - inscode(d, ds, de, "EBADRPC", EBADRPC, "RPC struct is bad"); + add_errcode("EBADRPC", EBADRPC, "RPC struct is bad"); #endif #ifdef EDEVERR - inscode(d, ds, de, "EDEVERR", EDEVERR, "Device error"); + add_errcode("EDEVERR", EDEVERR, "Device error"); #endif #ifdef EFTYPE - inscode(d, ds, de, "EFTYPE", EFTYPE, "Inappropriate file type or format"); + add_errcode("EFTYPE", EFTYPE, "Inappropriate file type or format"); #endif #ifdef ENEEDAUTH - inscode(d, ds, de, "ENEEDAUTH", ENEEDAUTH, "Need authenticator"); + add_errcode("ENEEDAUTH", ENEEDAUTH, "Need authenticator"); #endif #ifdef ENOATTR - inscode(d, ds, de, "ENOATTR", ENOATTR, "Attribute not found"); + add_errcode("ENOATTR", ENOATTR, "Attribute not found"); #endif #ifdef ENOPOLICY - inscode(d, ds, de, "ENOPOLICY", ENOPOLICY, "Policy not found"); + add_errcode("ENOPOLICY", ENOPOLICY, "Policy not found"); #endif #ifdef EPROCLIM - inscode(d, ds, de, "EPROCLIM", EPROCLIM, "Too many processes"); + add_errcode("EPROCLIM", EPROCLIM, "Too many processes"); #endif #ifdef EPROCUNAVAIL - inscode(d, ds, de, "EPROCUNAVAIL", EPROCUNAVAIL, "Bad procedure for program"); + add_errcode("EPROCUNAVAIL", EPROCUNAVAIL, "Bad procedure for program"); #endif #ifdef EPROGMISMATCH - inscode(d, ds, de, "EPROGMISMATCH", 
EPROGMISMATCH, "Program version wrong"); + add_errcode("EPROGMISMATCH", EPROGMISMATCH, "Program version wrong"); #endif #ifdef EPROGUNAVAIL - inscode(d, ds, de, "EPROGUNAVAIL", EPROGUNAVAIL, "RPC prog. not avail"); + add_errcode("EPROGUNAVAIL", EPROGUNAVAIL, "RPC prog. not avail"); #endif #ifdef EPWROFF - inscode(d, ds, de, "EPWROFF", EPWROFF, "Device power is off"); + add_errcode("EPWROFF", EPWROFF, "Device power is off"); #endif #ifdef ERPCMISMATCH - inscode(d, ds, de, "ERPCMISMATCH", ERPCMISMATCH, "RPC version wrong"); + add_errcode("ERPCMISMATCH", ERPCMISMATCH, "RPC version wrong"); #endif #ifdef ESHLIBVERS - inscode(d, ds, de, "ESHLIBVERS", ESHLIBVERS, "Shared library version mismatch"); + add_errcode("ESHLIBVERS", ESHLIBVERS, "Shared library version mismatch"); #endif - Py_DECREF(de); - return m; + Py_DECREF(error_dict); + return 0; +} + +static PyModuleDef_Slot errno_slots[] = { + {Py_mod_exec, errno_exec}, + {0, NULL} +}; + +PyDoc_STRVAR(errno__doc__, +"This module makes available standard errno system symbols.\n\ +\n\ +The value of each symbol is the corresponding integer value,\n\ +e.g., on most systems, errno.ENOENT equals the integer 2.\n\ +\n\ +The dictionary errno.errorcode maps numeric codes to symbol names,\n\ +e.g., errno.errorcode[2] could be the string 'ENOENT'.\n\ +\n\ +Symbols that are not relevant to the underlying system are not defined.\n\ +\n\ +To map error codes to error messages, use the function os.strerror(),\n\ +e.g. 
os.strerror(2) could return 'No such file or directory'."); + +static struct PyModuleDef errnomodule = { + PyModuleDef_HEAD_INIT, + .m_name = "errno", + .m_doc = errno__doc__, + .m_size = 0, + .m_methods = errno_methods, + .m_slots = errno_slots, +}; + +PyMODINIT_FUNC +PyInit_errno(void) +{ + return PyModuleDef_Init(&errnomodule); } From webhook-mailer at python.org Thu May 7 06:37:59 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Thu, 07 May 2020 10:37:59 -0000 Subject: [Python-checkins] bpo-40334: Fix error location upon parsing an invalid string literal (GH-19962) Message-ID: https://github.com/python/cpython/commit/2f37c355ab0e9ec9c1753985d27c41fa0bd719b9 commit: 2f37c355ab0e9ec9c1753985d27c41fa0bd719b9 branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-05-07T11:37:51+01:00 summary: bpo-40334: Fix error location upon parsing an invalid string literal (GH-19962) When parsing a string with an invalid escape, the old parser used to point to the beginning of the invalid string. This commit changes the new parser to match that behaviour, since it's currently pointing to the end of the string (or to be more precise, to the beginning of the next token). 
files: M Lib/test/test_cmd_line_script.py M Lib/test/test_string_literals.py M Parser/pegen/parse_string.c M Parser/pegen/parse_string.h M Parser/pegen/pegen.c M Parser/pegen/pegen.h diff --git a/Lib/test/test_cmd_line_script.py b/Lib/test/test_cmd_line_script.py index 1fc9500738f35..171340581af22 100644 --- a/Lib/test/test_cmd_line_script.py +++ b/Lib/test/test_cmd_line_script.py @@ -648,7 +648,7 @@ def test_syntaxerror_invalid_escape_sequence_multi_line(self): self.assertEqual( stderr.splitlines()[-3:], [ b' foo = """\\q"""', - b' ^', + b' ^', b'SyntaxError: invalid escape sequence \\q' ], ) diff --git a/Lib/test/test_string_literals.py b/Lib/test/test_string_literals.py index 5b5477d14d467..9565ee2485afd 100644 --- a/Lib/test/test_string_literals.py +++ b/Lib/test/test_string_literals.py @@ -118,8 +118,7 @@ def test_eval_str_invalid_escape(self): eval("'''\n\\z'''") self.assertEqual(len(w), 1) self.assertEqual(w[0].filename, '') - if use_old_parser(): - self.assertEqual(w[0].lineno, 1) + self.assertEqual(w[0].lineno, 1) with warnings.catch_warnings(record=True) as w: warnings.simplefilter('error', category=DeprecationWarning) @@ -128,8 +127,8 @@ def test_eval_str_invalid_escape(self): exc = cm.exception self.assertEqual(w, []) self.assertEqual(exc.filename, '') - if use_old_parser(): - self.assertEqual(exc.lineno, 1) + self.assertEqual(exc.lineno, 1) + self.assertEqual(exc.offset, 1) def test_eval_str_raw(self): self.assertEqual(eval(""" r'x' """), 'x') diff --git a/Parser/pegen/parse_string.c b/Parser/pegen/parse_string.c index d96303dc183fa..ca4b733c153b5 100644 --- a/Parser/pegen/parse_string.c +++ b/Parser/pegen/parse_string.c @@ -12,7 +12,7 @@ // file (like "_PyPegen_raise_syntax_error"). 
static int -warn_invalid_escape_sequence(Parser *p, unsigned char first_invalid_escape_char) +warn_invalid_escape_sequence(Parser *p, unsigned char first_invalid_escape_char, Token *t) { PyObject *msg = PyUnicode_FromFormat("invalid escape sequence \\%c", first_invalid_escape_char); @@ -20,11 +20,16 @@ warn_invalid_escape_sequence(Parser *p, unsigned char first_invalid_escape_char) return -1; } if (PyErr_WarnExplicitObject(PyExc_DeprecationWarning, msg, p->tok->filename, - p->tok->lineno, NULL, NULL) < 0) { + t->lineno, NULL, NULL) < 0) { if (PyErr_ExceptionMatches(PyExc_DeprecationWarning)) { /* Replace the DeprecationWarning exception with a SyntaxError to get a more accurate error report */ PyErr_Clear(); + + /* This is needed, in order for the SyntaxError to point to the token t, + since _PyPegen_raise_error uses p->tokens[p->fill - 1] for the + error location, if p->known_err_token is not set. */ + p->known_err_token = t; RAISE_SYNTAX_ERROR("invalid escape sequence \\%c", first_invalid_escape_char); } Py_DECREF(msg); @@ -47,7 +52,7 @@ decode_utf8(const char **sPtr, const char *end) } static PyObject * -decode_unicode_with_escapes(Parser *parser, const char *s, size_t len) +decode_unicode_with_escapes(Parser *parser, const char *s, size_t len, Token *t) { PyObject *v, *u; char *buf; @@ -110,7 +115,7 @@ decode_unicode_with_escapes(Parser *parser, const char *s, size_t len) v = _PyUnicode_DecodeUnicodeEscape(s, len, NULL, &first_invalid_escape); if (v != NULL && first_invalid_escape != NULL) { - if (warn_invalid_escape_sequence(parser, *first_invalid_escape) < 0) { + if (warn_invalid_escape_sequence(parser, *first_invalid_escape, t) < 0) { /* We have not decref u before because first_invalid_escape points inside u. 
*/ Py_XDECREF(u); @@ -123,7 +128,7 @@ decode_unicode_with_escapes(Parser *parser, const char *s, size_t len) } static PyObject * -decode_bytes_with_escapes(Parser *p, const char *s, Py_ssize_t len) +decode_bytes_with_escapes(Parser *p, const char *s, Py_ssize_t len, Token *t) { const char *first_invalid_escape; PyObject *result = _PyBytes_DecodeEscape(s, len, NULL, &first_invalid_escape); @@ -132,7 +137,7 @@ decode_bytes_with_escapes(Parser *p, const char *s, Py_ssize_t len) } if (first_invalid_escape != NULL) { - if (warn_invalid_escape_sequence(p, *first_invalid_escape) < 0) { + if (warn_invalid_escape_sequence(p, *first_invalid_escape, t) < 0) { Py_DECREF(result); return NULL; } @@ -146,9 +151,14 @@ decode_bytes_with_escapes(Parser *p, const char *s, Py_ssize_t len) If the string is an f-string, set *fstr and *fstrlen to the unparsed string object. Return 0 if no errors occurred. */ int -_PyPegen_parsestr(Parser *p, const char *s, int *bytesmode, int *rawmode, PyObject **result, - const char **fstr, Py_ssize_t *fstrlen) +_PyPegen_parsestr(Parser *p, int *bytesmode, int *rawmode, PyObject **result, + const char **fstr, Py_ssize_t *fstrlen, Token *t) { + const char *s = PyBytes_AsString(t->bytes); + if (s == NULL) { + return -1; + } + size_t len; int quote = Py_CHARMASK(*s); int fmode = 0; @@ -245,7 +255,7 @@ _PyPegen_parsestr(Parser *p, const char *s, int *bytesmode, int *rawmode, PyObje *result = PyBytes_FromStringAndSize(s, len); } else { - *result = decode_bytes_with_escapes(p, s, len); + *result = decode_bytes_with_escapes(p, s, len, t); } } else { @@ -253,7 +263,7 @@ _PyPegen_parsestr(Parser *p, const char *s, int *bytesmode, int *rawmode, PyObje *result = PyUnicode_DecodeUTF8Stateful(s, len, NULL, NULL); } else { - *result = decode_unicode_with_escapes(p, s, len); + *result = decode_unicode_with_escapes(p, s, len, t); } } return *result == NULL ? 
-1 : 0; @@ -637,7 +647,7 @@ fstring_compile_expr(Parser *p, const char *expr_start, const char *expr_end, */ static int fstring_find_literal(Parser *p, const char **str, const char *end, int raw, - PyObject **literal, int recurse_lvl) + PyObject **literal, int recurse_lvl, Token *t) { /* Get any literal string. It ends when we hit an un-doubled left brace (which isn't part of a unicode name escape such as @@ -660,7 +670,7 @@ fstring_find_literal(Parser *p, const char **str, const char *end, int raw, } break; } - if (ch == '{' && warn_invalid_escape_sequence(p, ch) < 0) { + if (ch == '{' && warn_invalid_escape_sequence(p, ch, t) < 0) { return -1; } } @@ -704,7 +714,7 @@ fstring_find_literal(Parser *p, const char **str, const char *end, int raw, NULL, NULL); else *literal = decode_unicode_with_escapes(p, literal_start, - s - literal_start); + s - literal_start, t); if (!*literal) return -1; } @@ -1041,7 +1051,7 @@ fstring_find_literal_and_expr(Parser *p, const char **str, const char *end, int assert(*literal == NULL && *expression == NULL); /* Get any literal string. 
*/ - result = fstring_find_literal(p, str, end, raw, literal, recurse_lvl); + result = fstring_find_literal(p, str, end, raw, literal, recurse_lvl, t); if (result < 0) goto error; diff --git a/Parser/pegen/parse_string.h b/Parser/pegen/parse_string.h index 4f2aa94fc19b0..cd85bd57d0a38 100644 --- a/Parser/pegen/parse_string.h +++ b/Parser/pegen/parse_string.h @@ -34,8 +34,8 @@ typedef struct { } FstringParser; void _PyPegen_FstringParser_Init(FstringParser *); -int _PyPegen_parsestr(Parser *, const char *, int *, int *, PyObject **, - const char **, Py_ssize_t *); +int _PyPegen_parsestr(Parser *, int *, int *, PyObject **, + const char **, Py_ssize_t *, Token *); int _PyPegen_FstringParser_ConcatFstring(Parser *, FstringParser *, const char **, const char *, int, int, Token *, Token *, Token *); diff --git a/Parser/pegen/pegen.c b/Parser/pegen/pegen.c index c311593af70f5..06af53b3597f7 100644 --- a/Parser/pegen/pegen.c +++ b/Parser/pegen/pegen.c @@ -383,7 +383,7 @@ _PyPegen_raise_error(Parser *p, PyObject *errtype, int with_col_number, const ch PyObject *errstr = NULL; PyObject *loc = NULL; PyObject *tmp = NULL; - Token *t = p->tokens[p->fill - 1]; + Token *t = p->known_err_token != NULL ? 
p->known_err_token : p->tokens[p->fill - 1]; Py_ssize_t col_number = !with_col_number; va_list va; p->error_indicator = 1; @@ -1053,6 +1053,7 @@ _PyPegen_Parser_New(struct tok_state *tok, int start_rule, int flags, p->starting_col_offset = 0; p->flags = flags; p->feature_version = feature_version; + p->known_err_token = NULL; return p; } @@ -1972,12 +1973,7 @@ _PyPegen_concatenate_strings(Parser *p, asdl_seq *strings) const char *fstr; Py_ssize_t fstrlen = -1; - char *this_str = PyBytes_AsString(t->bytes); - if (!this_str) { - goto error; - } - - if (_PyPegen_parsestr(p, this_str, &this_bytesmode, &this_rawmode, &s, &fstr, &fstrlen) != 0) { + if (_PyPegen_parsestr(p, &this_bytesmode, &this_rawmode, &s, &fstr, &fstrlen, t) != 0) { goto error; } diff --git a/Parser/pegen/pegen.h b/Parser/pegen/pegen.h index cbe6f197ac742..ffb18e47e4a9a 100644 --- a/Parser/pegen/pegen.h +++ b/Parser/pegen/pegen.h @@ -71,6 +71,7 @@ typedef struct { int flags; int feature_version; growable_comment_array type_ignore_comments; + Token *known_err_token; } Parser; typedef struct { From webhook-mailer at python.org Thu May 7 06:44:11 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Thu, 07 May 2020 10:44:11 -0000 Subject: [Python-checkins] bpo-40334: Error message for invalid default args in function call (GH-19973) Message-ID: https://github.com/python/cpython/commit/4638c6429575bd6de26b12b2af5df74d6568b553 commit: 4638c6429575bd6de26b12b2af5df74d6568b553 branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-05-07T11:44:06+01:00 summary: bpo-40334: Error message for invalid default args in function call (GH-19973) When parsing something like `f(g()=2)`, where the name of a default arg is not a NAME, but an arbitrary expression, a specialised error message is emitted. 
files: M Grammar/python.gram M Lib/test/test_exceptions.py M Lib/test/test_peg_parser.py M Parser/pegen/parse.c diff --git a/Grammar/python.gram b/Grammar/python.gram index 3d8a39b1d5906..574e1e1421644 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -548,10 +548,12 @@ kwarg_or_starred[KeywordOrStarred*]: | a=NAME '=' b=expression { _PyPegen_keyword_or_starred(p, CHECK(_Py_keyword(a->v.Name.id, b, EXTRA)), 1) } | a=starred_expression { _PyPegen_keyword_or_starred(p, a, 0) } + | invalid_kwarg kwarg_or_double_starred[KeywordOrStarred*]: | a=NAME '=' b=expression { _PyPegen_keyword_or_starred(p, CHECK(_Py_keyword(a->v.Name.id, b, EXTRA)), 1) } | '**' a=expression { _PyPegen_keyword_or_starred(p, CHECK(_Py_keyword(NULL, a, EXTRA)), 1) } + | invalid_kwarg # NOTE: star_targets may contain *bitwise_or, targets may not. star_targets[expr_ty]: @@ -620,6 +622,8 @@ incorrect_arguments: | expression for_if_clauses ',' [args | expression for_if_clauses] { RAISE_SYNTAX_ERROR("Generator expression must be parenthesized") } | a=args ',' args { _PyPegen_arguments_parsing_error(p, a) } +invalid_kwarg: + | expression '=' { RAISE_SYNTAX_ERROR("expression cannot contain assignment, perhaps you meant \"==\"?") } invalid_named_expression: | a=expression ':=' expression { RAISE_SYNTAX_ERROR("cannot use assignment expressions with %s", _PyPegen_get_expr_name(a)) } diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py index d83b73ab340c3..dbd7fa6bdd938 100644 --- a/Lib/test/test_exceptions.py +++ b/Lib/test/test_exceptions.py @@ -242,16 +242,16 @@ def baz(): check('from __future__ import doesnt_exist', 1, 1) check('from __future__ import braces', 1, 1) check('x=1\nfrom __future__ import division', 2, 1) + check('(yield i) = 2', 1, 1) check('def f(*):\n pass', 1, 7 if support.use_old_parser() else 8) + check('foo(1=2)', 1, 5 if support.use_old_parser() else 6) @support.skip_if_new_parser("Pegen column offsets might be different") def 
testSyntaxErrorOffsetCustom(self): self.check('for 1 in []: pass', 1, 5) self.check('[*x for x in xs]', 1, 2) self.check('def f():\n x, y: int', 2, 3) - self.check('(yield i) = 2', 1, 1) self.check('foo(x for x in range(10), 100)', 1, 5) - self.check('foo(1=2)', 1, 5) @cpython_only def testSettingException(self): diff --git a/Lib/test/test_peg_parser.py b/Lib/test/test_peg_parser.py index d6939fdbf618a..df2d46d8827f0 100644 --- a/Lib/test/test_peg_parser.py +++ b/Lib/test/test_peg_parser.py @@ -609,6 +609,9 @@ def f(): ("lambda *: pass", "named arguments must follow bare *"), ("lambda *,: pass", "named arguments must follow bare *"), ("lambda *, **a: pass", "named arguments must follow bare *"), + ("f(g()=2", "expression cannot contain assignment, perhaps you meant \"==\"?"), + ("f(a, b, *c, d.e=2", "expression cannot contain assignment, perhaps you meant \"==\"?"), + ("f(*a, **b, c=0, d[1]=3)", "expression cannot contain assignment, perhaps you meant \"==\"?"), ] GOOD_BUT_FAIL_TEST_CASES = [ diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index b1da16640aa6e..3a08abbca581c 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -210,161 +210,162 @@ static KeywordToken *reserved_keywords[] = { #define t_lookahead_type 1139 #define t_atom_type 1140 #define incorrect_arguments_type 1141 -#define invalid_named_expression_type 1142 -#define invalid_assignment_type 1143 -#define invalid_block_type 1144 -#define invalid_comprehension_type 1145 -#define invalid_parameters_type 1146 -#define invalid_star_etc_type 1147 -#define invalid_lambda_star_etc_type 1148 -#define invalid_double_type_comments_type 1149 -#define _loop0_1_type 1150 -#define _loop0_2_type 1151 -#define _loop0_4_type 1152 -#define _gather_3_type 1153 -#define _loop0_6_type 1154 -#define _gather_5_type 1155 -#define _loop0_8_type 1156 -#define _gather_7_type 1157 -#define _loop0_10_type 1158 -#define _gather_9_type 1159 -#define _loop1_11_type 1160 -#define _loop0_13_type 1161 -#define 
_gather_12_type 1162 -#define _tmp_14_type 1163 -#define _tmp_15_type 1164 -#define _tmp_16_type 1165 -#define _tmp_17_type 1166 -#define _tmp_18_type 1167 -#define _tmp_19_type 1168 -#define _tmp_20_type 1169 -#define _tmp_21_type 1170 -#define _loop1_22_type 1171 -#define _tmp_23_type 1172 -#define _tmp_24_type 1173 -#define _loop0_26_type 1174 -#define _gather_25_type 1175 -#define _loop0_28_type 1176 -#define _gather_27_type 1177 -#define _tmp_29_type 1178 -#define _loop0_30_type 1179 -#define _loop1_31_type 1180 -#define _loop0_33_type 1181 -#define _gather_32_type 1182 -#define _tmp_34_type 1183 -#define _loop0_36_type 1184 -#define _gather_35_type 1185 -#define _tmp_37_type 1186 -#define _loop0_39_type 1187 -#define _gather_38_type 1188 -#define _loop0_41_type 1189 -#define _gather_40_type 1190 -#define _loop0_43_type 1191 -#define _gather_42_type 1192 -#define _loop0_45_type 1193 -#define _gather_44_type 1194 -#define _tmp_46_type 1195 -#define _loop1_47_type 1196 -#define _tmp_48_type 1197 -#define _tmp_49_type 1198 -#define _tmp_50_type 1199 -#define _tmp_51_type 1200 -#define _tmp_52_type 1201 -#define _loop0_53_type 1202 -#define _loop0_54_type 1203 -#define _loop0_55_type 1204 -#define _loop1_56_type 1205 -#define _loop0_57_type 1206 -#define _loop1_58_type 1207 -#define _loop1_59_type 1208 -#define _loop1_60_type 1209 -#define _loop0_61_type 1210 -#define _loop1_62_type 1211 -#define _loop0_63_type 1212 -#define _loop1_64_type 1213 -#define _loop0_65_type 1214 -#define _loop1_66_type 1215 -#define _loop1_67_type 1216 -#define _tmp_68_type 1217 -#define _loop0_70_type 1218 -#define _gather_69_type 1219 -#define _loop1_71_type 1220 -#define _loop0_73_type 1221 -#define _gather_72_type 1222 -#define _loop1_74_type 1223 -#define _loop0_75_type 1224 -#define _loop0_76_type 1225 -#define _loop0_77_type 1226 -#define _loop1_78_type 1227 -#define _loop0_79_type 1228 -#define _loop1_80_type 1229 -#define _loop1_81_type 1230 -#define _loop1_82_type 1231 
-#define _loop0_83_type 1232 -#define _loop1_84_type 1233 -#define _loop0_85_type 1234 -#define _loop1_86_type 1235 -#define _loop0_87_type 1236 -#define _loop1_88_type 1237 -#define _loop1_89_type 1238 -#define _loop1_90_type 1239 -#define _loop1_91_type 1240 -#define _tmp_92_type 1241 -#define _loop0_94_type 1242 -#define _gather_93_type 1243 -#define _tmp_95_type 1244 -#define _tmp_96_type 1245 -#define _tmp_97_type 1246 -#define _tmp_98_type 1247 -#define _loop1_99_type 1248 -#define _tmp_100_type 1249 -#define _tmp_101_type 1250 -#define _loop0_103_type 1251 -#define _gather_102_type 1252 -#define _loop1_104_type 1253 -#define _loop0_105_type 1254 -#define _loop0_106_type 1255 -#define _tmp_107_type 1256 -#define _tmp_108_type 1257 -#define _loop0_110_type 1258 -#define _gather_109_type 1259 -#define _loop0_112_type 1260 -#define _gather_111_type 1261 -#define _loop0_114_type 1262 -#define _gather_113_type 1263 -#define _loop0_116_type 1264 -#define _gather_115_type 1265 -#define _loop0_117_type 1266 -#define _loop0_119_type 1267 -#define _gather_118_type 1268 -#define _tmp_120_type 1269 -#define _loop0_122_type 1270 -#define _gather_121_type 1271 -#define _loop0_124_type 1272 -#define _gather_123_type 1273 -#define _tmp_125_type 1274 -#define _tmp_126_type 1275 -#define _tmp_127_type 1276 -#define _tmp_128_type 1277 -#define _tmp_129_type 1278 -#define _loop0_130_type 1279 -#define _tmp_131_type 1280 -#define _tmp_132_type 1281 -#define _tmp_133_type 1282 -#define _tmp_134_type 1283 -#define _tmp_135_type 1284 -#define _tmp_136_type 1285 -#define _tmp_137_type 1286 -#define _tmp_138_type 1287 -#define _tmp_139_type 1288 -#define _tmp_140_type 1289 -#define _tmp_141_type 1290 -#define _tmp_142_type 1291 -#define _tmp_143_type 1292 -#define _tmp_144_type 1293 -#define _loop1_145_type 1294 -#define _tmp_146_type 1295 -#define _tmp_147_type 1296 +#define invalid_kwarg_type 1142 +#define invalid_named_expression_type 1143 +#define invalid_assignment_type 1144 
+#define invalid_block_type 1145 +#define invalid_comprehension_type 1146 +#define invalid_parameters_type 1147 +#define invalid_star_etc_type 1148 +#define invalid_lambda_star_etc_type 1149 +#define invalid_double_type_comments_type 1150 +#define _loop0_1_type 1151 +#define _loop0_2_type 1152 +#define _loop0_4_type 1153 +#define _gather_3_type 1154 +#define _loop0_6_type 1155 +#define _gather_5_type 1156 +#define _loop0_8_type 1157 +#define _gather_7_type 1158 +#define _loop0_10_type 1159 +#define _gather_9_type 1160 +#define _loop1_11_type 1161 +#define _loop0_13_type 1162 +#define _gather_12_type 1163 +#define _tmp_14_type 1164 +#define _tmp_15_type 1165 +#define _tmp_16_type 1166 +#define _tmp_17_type 1167 +#define _tmp_18_type 1168 +#define _tmp_19_type 1169 +#define _tmp_20_type 1170 +#define _tmp_21_type 1171 +#define _loop1_22_type 1172 +#define _tmp_23_type 1173 +#define _tmp_24_type 1174 +#define _loop0_26_type 1175 +#define _gather_25_type 1176 +#define _loop0_28_type 1177 +#define _gather_27_type 1178 +#define _tmp_29_type 1179 +#define _loop0_30_type 1180 +#define _loop1_31_type 1181 +#define _loop0_33_type 1182 +#define _gather_32_type 1183 +#define _tmp_34_type 1184 +#define _loop0_36_type 1185 +#define _gather_35_type 1186 +#define _tmp_37_type 1187 +#define _loop0_39_type 1188 +#define _gather_38_type 1189 +#define _loop0_41_type 1190 +#define _gather_40_type 1191 +#define _loop0_43_type 1192 +#define _gather_42_type 1193 +#define _loop0_45_type 1194 +#define _gather_44_type 1195 +#define _tmp_46_type 1196 +#define _loop1_47_type 1197 +#define _tmp_48_type 1198 +#define _tmp_49_type 1199 +#define _tmp_50_type 1200 +#define _tmp_51_type 1201 +#define _tmp_52_type 1202 +#define _loop0_53_type 1203 +#define _loop0_54_type 1204 +#define _loop0_55_type 1205 +#define _loop1_56_type 1206 +#define _loop0_57_type 1207 +#define _loop1_58_type 1208 +#define _loop1_59_type 1209 +#define _loop1_60_type 1210 +#define _loop0_61_type 1211 +#define _loop1_62_type 
1212 +#define _loop0_63_type 1213 +#define _loop1_64_type 1214 +#define _loop0_65_type 1215 +#define _loop1_66_type 1216 +#define _loop1_67_type 1217 +#define _tmp_68_type 1218 +#define _loop0_70_type 1219 +#define _gather_69_type 1220 +#define _loop1_71_type 1221 +#define _loop0_73_type 1222 +#define _gather_72_type 1223 +#define _loop1_74_type 1224 +#define _loop0_75_type 1225 +#define _loop0_76_type 1226 +#define _loop0_77_type 1227 +#define _loop1_78_type 1228 +#define _loop0_79_type 1229 +#define _loop1_80_type 1230 +#define _loop1_81_type 1231 +#define _loop1_82_type 1232 +#define _loop0_83_type 1233 +#define _loop1_84_type 1234 +#define _loop0_85_type 1235 +#define _loop1_86_type 1236 +#define _loop0_87_type 1237 +#define _loop1_88_type 1238 +#define _loop1_89_type 1239 +#define _loop1_90_type 1240 +#define _loop1_91_type 1241 +#define _tmp_92_type 1242 +#define _loop0_94_type 1243 +#define _gather_93_type 1244 +#define _tmp_95_type 1245 +#define _tmp_96_type 1246 +#define _tmp_97_type 1247 +#define _tmp_98_type 1248 +#define _loop1_99_type 1249 +#define _tmp_100_type 1250 +#define _tmp_101_type 1251 +#define _loop0_103_type 1252 +#define _gather_102_type 1253 +#define _loop1_104_type 1254 +#define _loop0_105_type 1255 +#define _loop0_106_type 1256 +#define _tmp_107_type 1257 +#define _tmp_108_type 1258 +#define _loop0_110_type 1259 +#define _gather_109_type 1260 +#define _loop0_112_type 1261 +#define _gather_111_type 1262 +#define _loop0_114_type 1263 +#define _gather_113_type 1264 +#define _loop0_116_type 1265 +#define _gather_115_type 1266 +#define _loop0_117_type 1267 +#define _loop0_119_type 1268 +#define _gather_118_type 1269 +#define _tmp_120_type 1270 +#define _loop0_122_type 1271 +#define _gather_121_type 1272 +#define _loop0_124_type 1273 +#define _gather_123_type 1274 +#define _tmp_125_type 1275 +#define _tmp_126_type 1276 +#define _tmp_127_type 1277 +#define _tmp_128_type 1278 +#define _tmp_129_type 1279 +#define _loop0_130_type 1280 +#define 
_tmp_131_type 1281 +#define _tmp_132_type 1282 +#define _tmp_133_type 1283 +#define _tmp_134_type 1284 +#define _tmp_135_type 1285 +#define _tmp_136_type 1286 +#define _tmp_137_type 1287 +#define _tmp_138_type 1288 +#define _tmp_139_type 1289 +#define _tmp_140_type 1290 +#define _tmp_141_type 1291 +#define _tmp_142_type 1292 +#define _tmp_143_type 1293 +#define _tmp_144_type 1294 +#define _loop1_145_type 1295 +#define _tmp_146_type 1296 +#define _tmp_147_type 1297 static mod_ty file_rule(Parser *p); static mod_ty interactive_rule(Parser *p); @@ -508,6 +509,7 @@ static expr_ty t_primary_rule(Parser *p); static void *t_lookahead_rule(Parser *p); static expr_ty t_atom_rule(Parser *p); static void *incorrect_arguments_rule(Parser *p); +static void *invalid_kwarg_rule(Parser *p); static void *invalid_named_expression_rule(Parser *p); static void *invalid_assignment_rule(Parser *p); static void *invalid_block_rule(Parser *p); @@ -9079,7 +9081,7 @@ starred_expression_rule(Parser *p) return res; } -// kwarg_or_starred: NAME '=' expression | starred_expression +// kwarg_or_starred: NAME '=' expression | starred_expression | invalid_kwarg static KeywordOrStarred* kwarg_or_starred_rule(Parser *p) { @@ -9140,12 +9142,23 @@ kwarg_or_starred_rule(Parser *p) } p->mark = mark; } + { // invalid_kwarg + void *invalid_kwarg_var; + if ( + (invalid_kwarg_var = invalid_kwarg_rule(p)) // invalid_kwarg + ) + { + res = invalid_kwarg_var; + goto done; + } + p->mark = mark; + } res = NULL; done: return res; } -// kwarg_or_double_starred: NAME '=' expression | '**' expression +// kwarg_or_double_starred: NAME '=' expression | '**' expression | invalid_kwarg static KeywordOrStarred* kwarg_or_double_starred_rule(Parser *p) { @@ -9217,6 +9230,17 @@ kwarg_or_double_starred_rule(Parser *p) } p->mark = mark; } + { // invalid_kwarg + void *invalid_kwarg_var; + if ( + (invalid_kwarg_var = invalid_kwarg_rule(p)) // invalid_kwarg + ) + { + res = invalid_kwarg_var; + goto done; + } + p->mark = mark; + } 
res = NULL; done: return res; @@ -10561,6 +10585,38 @@ incorrect_arguments_rule(Parser *p) return res; } +// invalid_kwarg: expression '=' +static void * +invalid_kwarg_rule(Parser *p) +{ + if (p->error_indicator) { + return NULL; + } + void * res = NULL; + int mark = p->mark; + { // expression '=' + expr_ty expression_var; + Token * literal; + if ( + (expression_var = expression_rule(p)) // expression + && + (literal = _PyPegen_expect_token(p, 22)) // token='=' + ) + { + res = RAISE_SYNTAX_ERROR ( "expression cannot contain assignment, perhaps you meant \"==\"?" ); + if (res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + return NULL; + } + goto done; + } + p->mark = mark; + } + res = NULL; + done: + return res; +} + // invalid_named_expression: expression ':=' expression static void * invalid_named_expression_rule(Parser *p) From webhook-mailer at python.org Thu May 7 09:40:07 2020 From: webhook-mailer at python.org (Petr Viktorin) Date: Thu, 07 May 2020 13:40:07 -0000 Subject: [Python-checkins] bpo-38787: C API for module state access from extension methods (PEP 573) (GH-19936) Message-ID: https://github.com/python/cpython/commit/e1becf46b4e3ba6d7d32ebf4bbd3e0804766a423 commit: e1becf46b4e3ba6d7d32ebf4bbd3e0804766a423 branch: master author: Petr Viktorin committer: GitHub date: 2020-05-07T15:39:59+02:00 summary: bpo-38787: C API for module state access from extension methods (PEP 573) (GH-19936) Module C state is now accessible from C-defined heap type methods (PEP 573). Patch by Marcel Plch and Petr Viktorin. 
Co-authored-by: Marcel Plch Co-authored-by: Victor Stinner files: A Include/cpython/methodobject.h A Misc/NEWS.d/next/C API/2020-01-22-12-38-59.bpo-38787.HUH6hd.rst A Modules/clinic/_testmultiphase.c.h M Doc/c-api/structures.rst M Doc/c-api/type.rst M Include/cpython/object.h M Include/methodobject.h M Include/object.h M Lib/test/test_capi.py M Lib/test/test_sys.py M Makefile.pre.in M Modules/_testmultiphase.c M Objects/descrobject.c M Objects/methodobject.c M Objects/object.c M Objects/typeobject.c M PCbuild/pythoncore.vcxproj M PCbuild/pythoncore.vcxproj.filters M Tools/clinic/clinic.py diff --git a/Doc/c-api/structures.rst b/Doc/c-api/structures.rst index fc3467bee4d3c..72c94459295c4 100644 --- a/Doc/c-api/structures.rst +++ b/Doc/c-api/structures.rst @@ -147,23 +147,56 @@ Implementing functions and methods value of the function as exposed in Python. The function must return a new reference. + The function signature is:: + + PyObject *PyCFunction(PyObject *self, + PyObject *const *args); .. c:type:: PyCFunctionWithKeywords Type of the functions used to implement Python callables in C with signature :const:`METH_VARARGS | METH_KEYWORDS`. + The function signature is:: + + PyObject *PyCFunctionWithKeywords(PyObject *self, + PyObject *const *args, + PyObject *kwargs); .. c:type:: _PyCFunctionFast Type of the functions used to implement Python callables in C with signature :const:`METH_FASTCALL`. + The function signature is:: + PyObject *_PyCFunctionFast(PyObject *self, + PyObject *const *args, + Py_ssize_t nargs); .. c:type:: _PyCFunctionFastWithKeywords Type of the functions used to implement Python callables in C with signature :const:`METH_FASTCALL | METH_KEYWORDS`. + The function signature is:: + + PyObject *_PyCFunctionFastWithKeywords(PyObject *self, + PyObject *const *args, + Py_ssize_t nargs, + PyObject *kwnames); + +.. 
c:type:: PyCMethod + + Type of the functions used to implement Python callables in C + with signature :const:`METH_METHOD | METH_FASTCALL | METH_KEYWORDS`. + The function signature is:: + + PyObject *PyCMethod(PyObject *self, + PyTypeObject *defining_class, + PyObject *const *args, + Py_ssize_t nargs, + PyObject *kwnames) + + .. versionadded:: 3.9 .. c:type:: PyMethodDef @@ -197,9 +230,7 @@ The :attr:`ml_flags` field is a bitfield which can include the following flags. The individual flags indicate either a calling convention or a binding convention. -There are four basic calling conventions for positional arguments -and two of them can be combined with :const:`METH_KEYWORDS` to support -also keyword arguments. So there are a total of 6 calling conventions: +There are these calling conventions: .. data:: METH_VARARGS @@ -250,6 +281,19 @@ also keyword arguments. So there are a total of 6 calling conventions: .. versionadded:: 3.7 +.. data:: METH_METHOD | METH_FASTCALL | METH_KEYWORDS + + Extension of :const:`METH_FASTCALL | METH_KEYWORDS` supporting the *defining + class*, that is, the class that contains the method in question. + The defining class might be a superclass of ``Py_TYPE(self)``. + + The method needs to be of type :c:type:`PyCMethod`, the same as for + ``METH_FASTCALL | METH_KEYWORDS`` with ``defining_class`` argument added after + ``self``. + + .. versionadded:: 3.9 + + .. data:: METH_NOARGS Methods without parameters don't need to check whether arguments are given if diff --git a/Doc/c-api/type.rst b/Doc/c-api/type.rst index f774ca35edab9..7dd393f47f1b4 100644 --- a/Doc/c-api/type.rst +++ b/Doc/c-api/type.rst @@ -109,6 +109,30 @@ Type Objects .. versionadded:: 3.4 +.. c:function:: PyObject* PyType_GetModule(PyTypeObject *type) + + Return the module object associated with the given type when the type was + created using :c:func:`PyType_FromModuleAndSpec`. 
+ + If no module is associated with the given type, sets :py:class:`TypeError` + and returns ``NULL``. + + .. versionadded:: 3.9 + +.. c:function:: void* PyType_GetModuleState(PyTypeObject *type) + + Return the state of the module object associated with the given type. + This is a shortcut for calling :c:func:`PyModule_GetState()` on the result + of :c:func:`PyType_GetModule`. + + If no module is associated with the given type, sets :py:class:`TypeError` + and returns ``NULL``. + + If the *type* has an associated module but its state is ``NULL``, + returns ``NULL`` without setting an exception. + + .. versionadded:: 3.9 + Creating Heap-Allocated Types ............................. @@ -116,7 +140,7 @@ Creating Heap-Allocated Types The following functions and structs are used to create :ref:`heap types `. -.. c:function:: PyObject* PyType_FromSpecWithBases(PyType_Spec *spec, PyObject *bases) +.. c:function:: PyObject* PyType_FromModuleAndSpec(PyObject *module, PyType_Spec *spec, PyObject *bases) Creates and returns a heap type object from the *spec* (:const:`Py_TPFLAGS_HEAPTYPE`). @@ -127,8 +151,18 @@ The following functions and structs are used to create If *bases* is ``NULL``, the *Py_tp_base* slot is used instead. If that also is ``NULL``, the new type derives from :class:`object`. + The *module* must be a module object or ``NULL``. + If not ``NULL``, the module is associated with the new type and can later be + retreived with :c:func:`PyType_GetModule`. + This function calls :c:func:`PyType_Ready` on the new type. + .. versionadded:: 3.9 + +.. c:function:: PyObject* PyType_FromSpecWithBases(PyType_Spec *spec, PyObject *bases) + + Equivalent to ``PyType_FromModuleAndSpec(NULL, spec, bases)``. + .. versionadded:: 3.3 .. 
c:function:: PyObject* PyType_FromSpec(PyType_Spec *spec) diff --git a/Include/cpython/methodobject.h b/Include/cpython/methodobject.h new file mode 100644 index 0000000000000..2ac2cbf36aa79 --- /dev/null +++ b/Include/cpython/methodobject.h @@ -0,0 +1,32 @@ +#ifndef Py_CPYTHON_METHODOBJECT_H +# error "this header file must not be included directly" +#endif + +PyAPI_DATA(PyTypeObject) PyCMethod_Type; + +/* Macros for direct access to these values. Type checks are *not* + done, so use with care. */ +#define PyCFunction_GET_FUNCTION(func) \ + (((PyCFunctionObject *)func) -> m_ml -> ml_meth) +#define PyCFunction_GET_SELF(func) \ + (((PyCFunctionObject *)func) -> m_ml -> ml_flags & METH_STATIC ? \ + NULL : ((PyCFunctionObject *)func) -> m_self) +#define PyCFunction_GET_FLAGS(func) \ + (((PyCFunctionObject *)func) -> m_ml -> ml_flags) +#define PyCFunction_GET_CLASS(func) \ + (((PyCFunctionObject *)func) -> m_ml -> ml_flags & METH_METHOD ? \ + ((PyCMethodObject *)func) -> mm_class : NULL) + +typedef struct { + PyObject_HEAD + PyMethodDef *m_ml; /* Description of the C function to call */ + PyObject *m_self; /* Passed as 'self' arg to the C func, can be NULL */ + PyObject *m_module; /* The __module__ attribute, can be anything */ + PyObject *m_weakreflist; /* List of weak references */ + vectorcallfunc vectorcall; +} PyCFunctionObject; + +typedef struct { + PyCFunctionObject func; + PyTypeObject *mm_class; /* Class that defines this method */ +} PyCMethodObject; diff --git a/Include/cpython/object.h b/Include/cpython/object.h index 45da752ed2e94..8bf05a3271183 100644 --- a/Include/cpython/object.h +++ b/Include/cpython/object.h @@ -289,6 +289,7 @@ typedef struct _heaptypeobject { PyBufferProcs as_buffer; PyObject *ht_name, *ht_slots, *ht_qualname; struct _dictkeysobject *ht_cached_keys; + PyObject *ht_module; /* here are optional user slots, followed by the members. 
*/ } PyHeapTypeObject; diff --git a/Include/methodobject.h b/Include/methodobject.h index adb2d9e884fbb..7c7362cded35b 100644 --- a/Include/methodobject.h +++ b/Include/methodobject.h @@ -13,7 +13,7 @@ extern "C" { PyAPI_DATA(PyTypeObject) PyCFunction_Type; -#define PyCFunction_Check(op) Py_IS_TYPE(op, &PyCFunction_Type) +#define PyCFunction_Check(op) (Py_IS_TYPE(op, &PyCFunction_Type) || (PyType_IsSubtype(Py_TYPE(op), &PyCFunction_Type))) typedef PyObject *(*PyCFunction)(PyObject *, PyObject *); typedef PyObject *(*_PyCFunctionFast) (PyObject *, PyObject *const *, Py_ssize_t); @@ -22,21 +22,13 @@ typedef PyObject *(*PyCFunctionWithKeywords)(PyObject *, PyObject *, typedef PyObject *(*_PyCFunctionFastWithKeywords) (PyObject *, PyObject *const *, Py_ssize_t, PyObject *); +typedef PyObject *(*PyCMethod)(PyObject *, PyTypeObject *, PyObject *const *, + size_t, PyObject *); + PyAPI_FUNC(PyCFunction) PyCFunction_GetFunction(PyObject *); PyAPI_FUNC(PyObject *) PyCFunction_GetSelf(PyObject *); PyAPI_FUNC(int) PyCFunction_GetFlags(PyObject *); -/* Macros for direct access to these values. Type checks are *not* - done, so use with care. */ -#ifndef Py_LIMITED_API -#define PyCFunction_GET_FUNCTION(func) \ - (((PyCFunctionObject *)func) -> m_ml -> ml_meth) -#define PyCFunction_GET_SELF(func) \ - (((PyCFunctionObject *)func) -> m_ml -> ml_flags & METH_STATIC ? 
\ - NULL : ((PyCFunctionObject *)func) -> m_self) -#define PyCFunction_GET_FLAGS(func) \ - (((PyCFunctionObject *)func) -> m_ml -> ml_flags) -#endif Py_DEPRECATED(3.9) PyAPI_FUNC(PyObject *) PyCFunction_Call(PyObject *, PyObject *, PyObject *); struct PyMethodDef { @@ -52,6 +44,13 @@ typedef struct PyMethodDef PyMethodDef; PyAPI_FUNC(PyObject *) PyCFunction_NewEx(PyMethodDef *, PyObject *, PyObject *); +#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03090000 +#define PyCFunction_NewEx(ML, SELF, MOD) PyCMethod_New((ML), (SELF), (MOD), NULL) +PyAPI_FUNC(PyObject *) PyCMethod_New(PyMethodDef *, PyObject *, + PyObject *, PyTypeObject *); +#endif + + /* Flag passed to newmethodobject */ /* #define METH_OLDARGS 0x0000 -- unsupported now */ #define METH_VARARGS 0x0001 @@ -84,15 +83,24 @@ PyAPI_FUNC(PyObject *) PyCFunction_NewEx(PyMethodDef *, PyObject *, #define METH_STACKLESS 0x0000 #endif +/* METH_METHOD means the function stores an + * additional reference to the class that defines it; + * both self and class are passed to it. + * It uses PyCMethodObject instead of PyCFunctionObject. + * May not be combined with METH_NOARGS, METH_O, METH_CLASS or METH_STATIC. 
+ */ + +#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03090000 +#define METH_METHOD 0x0200 +#endif + + #ifndef Py_LIMITED_API -typedef struct { - PyObject_HEAD - PyMethodDef *m_ml; /* Description of the C function to call */ - PyObject *m_self; /* Passed as 'self' arg to the C func, can be NULL */ - PyObject *m_module; /* The __module__ attribute, can be anything */ - PyObject *m_weakreflist; /* List of weak references */ - vectorcallfunc vectorcall; -} PyCFunctionObject; + +#define Py_CPYTHON_METHODOBJECT_H +#include "cpython/methodobject.h" +#undef Py_CPYTHON_METHODOBJECT_H + #endif #ifdef __cplusplus diff --git a/Include/object.h b/Include/object.h index 6c30809124dea..514d934196f57 100644 --- a/Include/object.h +++ b/Include/object.h @@ -213,6 +213,11 @@ PyAPI_FUNC(PyObject*) PyType_FromSpecWithBases(PyType_Spec*, PyObject*); #if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03040000 PyAPI_FUNC(void*) PyType_GetSlot(PyTypeObject*, int); #endif +#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03090000 +PyAPI_FUNC(PyObject*) PyType_FromModuleAndSpec(PyObject *, PyType_Spec *, PyObject *); +PyAPI_FUNC(PyObject *) PyType_GetModule(struct _typeobject *); +PyAPI_FUNC(void *) PyType_GetModuleState(struct _typeobject *); +#endif /* Generic type check */ PyAPI_FUNC(int) PyType_IsSubtype(PyTypeObject *, PyTypeObject *); diff --git a/Lib/test/test_capi.py b/Lib/test/test_capi.py index f9578d3afa81f..5c7526aa7ec29 100644 --- a/Lib/test/test_capi.py +++ b/Lib/test/test_capi.py @@ -13,6 +13,8 @@ import time import unittest import weakref +import importlib.machinery +import importlib.util from test import support from test.support import MISSING_C_DOCSTRINGS from test.support.script_helper import assert_python_failure, assert_python_ok @@ -774,5 +776,76 @@ class PyMemDefaultTests(PyMemDebugTests): PYTHONMALLOC = '' +class Test_ModuleStateAccess(unittest.TestCase): + """Test access to module start (PEP 573)""" + + # The C part of the tests lives in 
_testmultiphase, in a module called + # _testmultiphase_meth_state_access. + # This module has multi-phase initialization, unlike _testcapi. + + def setUp(self): + fullname = '_testmultiphase_meth_state_access' # XXX + origin = importlib.util.find_spec('_testmultiphase').origin + loader = importlib.machinery.ExtensionFileLoader(fullname, origin) + spec = importlib.util.spec_from_loader(fullname, loader) + module = importlib.util.module_from_spec(spec) + loader.exec_module(module) + self.module = module + + def test_subclass_get_module(self): + """PyType_GetModule for defining_class""" + class StateAccessType_Subclass(self.module.StateAccessType): + pass + + instance = StateAccessType_Subclass() + self.assertIs(instance.get_defining_module(), self.module) + + def test_subclass_get_module_with_super(self): + class StateAccessType_Subclass(self.module.StateAccessType): + def get_defining_module(self): + return super().get_defining_module() + + instance = StateAccessType_Subclass() + self.assertIs(instance.get_defining_module(), self.module) + + def test_state_access(self): + """Checks methods defined with and without argument clinic + + This tests a no-arg method (get_count) and a method with + both a positional and keyword argument. 
+ """ + + a = self.module.StateAccessType() + b = self.module.StateAccessType() + + methods = { + 'clinic': a.increment_count_clinic, + 'noclinic': a.increment_count_noclinic, + } + + for name, increment_count in methods.items(): + with self.subTest(name): + self.assertEqual(a.get_count(), b.get_count()) + self.assertEqual(a.get_count(), 0) + + increment_count() + self.assertEqual(a.get_count(), b.get_count()) + self.assertEqual(a.get_count(), 1) + + increment_count(3) + self.assertEqual(a.get_count(), b.get_count()) + self.assertEqual(a.get_count(), 4) + + increment_count(-2, twice=True) + self.assertEqual(a.get_count(), b.get_count()) + self.assertEqual(a.get_count(), 0) + + with self.assertRaises(TypeError): + increment_count(thrice=3) + + with self.assertRaises(TypeError): + increment_count(1, 2, 3) + + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py index 91a645b460ec0..33b34593a0af9 100644 --- a/Lib/test/test_sys.py +++ b/Lib/test/test_sys.py @@ -1322,7 +1322,7 @@ def delx(self): del self.__x '3P' # PyMappingMethods '10P' # PySequenceMethods '2P' # PyBufferProcs - '4P') + '5P') class newstyleclass(object): pass # Separate block for PyDictKeysObject with 8 keys and 5 entries check(newstyleclass, s + calcsize("2nP2n0P") + 8 + 5*calcsize("n2P")) diff --git a/Makefile.pre.in b/Makefile.pre.in index 3cb8b84157f0e..0d616d304484c 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -1104,6 +1104,7 @@ PYTHON_HEADERS= \ $(srcdir)/Include/cpython/initconfig.h \ $(srcdir)/Include/cpython/interpreteridobject.h \ $(srcdir)/Include/cpython/listobject.h \ + $(srcdir)/Include/cpython/methodobject.h \ $(srcdir)/Include/cpython/object.h \ $(srcdir)/Include/cpython/objimpl.h \ $(srcdir)/Include/cpython/pyerrors.h \ diff --git a/Misc/NEWS.d/next/C API/2020-01-22-12-38-59.bpo-38787.HUH6hd.rst b/Misc/NEWS.d/next/C API/2020-01-22-12-38-59.bpo-38787.HUH6hd.rst new file mode 100644 index 0000000000000..785ea323c316d --- /dev/null 
+++ b/Misc/NEWS.d/next/C API/2020-01-22-12-38-59.bpo-38787.HUH6hd.rst @@ -0,0 +1,2 @@ +Module C state is now accessible from C-defined heap type methods (:pep:`573`). +Patch by Marcel Plch and Petr Viktorin. diff --git a/Modules/_testmultiphase.c b/Modules/_testmultiphase.c index eadc46fbf1867..3084fc12a5ef5 100644 --- a/Modules/_testmultiphase.c +++ b/Modules/_testmultiphase.c @@ -4,6 +4,19 @@ #include "Python.h" +/* State for testing module state access from methods */ + +typedef struct { + int counter; +} meth_state; + +/*[clinic input] +module _testmultiphase + +class _testmultiphase.StateAccessType "StateAccessTypeObject *" "!StateAccessType" +[clinic start generated code]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=bab9f2fe3bd312ff]*/ + /* Example objects */ typedef struct { PyObject_HEAD @@ -14,6 +27,10 @@ typedef struct { PyObject *integer; } testmultiphase_state; +typedef struct { + PyObject_HEAD +} StateAccessTypeObject; + /* Example methods */ static int @@ -42,6 +59,7 @@ Example_demo(ExampleObject *self, PyObject *args) Py_RETURN_NONE; } +#include "clinic/_testmultiphase.c.h" static PyMethodDef Example_methods[] = { {"demo", (PyCFunction)Example_demo, METH_VARARGS, @@ -102,6 +120,150 @@ static PyType_Spec Example_Type_spec = { Example_Type_slots }; + +/*[clinic input] +_testmultiphase.StateAccessType.get_defining_module + + cls: defining_class + +Return the module of the defining class. 
+[clinic start generated code]*/ + +static PyObject * +_testmultiphase_StateAccessType_get_defining_module_impl(StateAccessTypeObject *self, + PyTypeObject *cls) +/*[clinic end generated code: output=ba2a14284a5d0921 input=946149f91cf72c0d]*/ +{ + PyObject *retval; + retval = PyType_GetModule(cls); + if (retval == NULL) { + return NULL; + } + Py_INCREF(retval); + return retval; +} + +/*[clinic input] +_testmultiphase.StateAccessType.increment_count_clinic + + cls: defining_class + / + n: int = 1 + * + twice: bool = False + +Add 'n' from the module-state counter. + +Pass 'twice' to double that amount. + +This tests Argument Clinic support for defining_class. +[clinic start generated code]*/ + +static PyObject * +_testmultiphase_StateAccessType_increment_count_clinic_impl(StateAccessTypeObject *self, + PyTypeObject *cls, + int n, int twice) +/*[clinic end generated code: output=3b34f86bc5473204 input=551d482e1fe0b8f5]*/ +{ + meth_state *m_state = PyType_GetModuleState(cls); + if (twice) { + n *= 2; + } + m_state->counter += n; + + Py_RETURN_NONE; +} + +PyDoc_STRVAR(_StateAccessType_decrement_count__doc__, +"decrement_count($self, /, n=1, *, twice=None)\n" +"--\n" +"\n" +"Add 'n' from the module-state counter.\n" +"Pass 'twice' to double that amount.\n" +"(This is to test both positional and keyword arguments."); + +// Intentionally does not use Argument Clinic +static PyObject * +_StateAccessType_increment_count_noclinic(StateAccessTypeObject *self, + PyTypeObject *defining_class, + PyObject *const *args, + Py_ssize_t nargs, + PyObject *kwnames) +{ + if (!_PyArg_CheckPositional("StateAccessTypeObject.decrement_count", nargs, 0, 1)) { + return NULL; + } + long n = 1; + if (nargs) { + n = PyLong_AsLong(args[0]); + if (PyErr_Occurred()) { + return NULL; + } + } + if (kwnames && PyTuple_Check(kwnames)) { + if (PyTuple_GET_SIZE(kwnames) > 1 || + PyUnicode_CompareWithASCIIString( + PyTuple_GET_ITEM(kwnames, 0), + "twice" + )) { + PyErr_SetString( + PyExc_TypeError, + 
"decrement_count only takes 'twice' keyword argument" + ); + return NULL; + } + n *= 2; + } + meth_state *m_state = PyType_GetModuleState(defining_class); + m_state->counter += n; + + Py_RETURN_NONE; +} + +/*[clinic input] +_testmultiphase.StateAccessType.get_count + + cls: defining_class + +Return the value of the module-state counter. +[clinic start generated code]*/ + +static PyObject * +_testmultiphase_StateAccessType_get_count_impl(StateAccessTypeObject *self, + PyTypeObject *cls) +/*[clinic end generated code: output=64600f95b499a319 input=d5d181f12384849f]*/ +{ + meth_state *m_state = PyType_GetModuleState(cls); + return PyLong_FromLong(m_state->counter); +} + +static PyMethodDef StateAccessType_methods[] = { + _TESTMULTIPHASE_STATEACCESSTYPE_GET_DEFINING_MODULE_METHODDEF + _TESTMULTIPHASE_STATEACCESSTYPE_GET_COUNT_METHODDEF + _TESTMULTIPHASE_STATEACCESSTYPE_INCREMENT_COUNT_CLINIC_METHODDEF + { + "increment_count_noclinic", + (PyCFunction)(void(*)(void))_StateAccessType_increment_count_noclinic, + METH_METHOD|METH_FASTCALL|METH_KEYWORDS, + _StateAccessType_decrement_count__doc__ + }, + {NULL, NULL} /* sentinel */ +}; + +static PyType_Slot StateAccessType_Type_slots[] = { + {Py_tp_doc, "Type for testing per-module state access from methods."}, + {Py_tp_methods, StateAccessType_methods}, + {0, NULL} +}; + +static PyType_Spec StateAccessType_spec = { + "_testimportexec.StateAccessType", + sizeof(StateAccessTypeObject), + 0, + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_FINALIZE | Py_TPFLAGS_BASETYPE, + StateAccessType_Type_slots +}; + /* Function of two integers returning integer */ PyDoc_STRVAR(testexport_foo_doc, @@ -193,30 +355,39 @@ static int execfunc(PyObject *m) /* Add a custom type */ temp = PyType_FromSpec(&Example_Type_spec); - if (temp == NULL) + if (temp == NULL) { goto fail; - if (PyModule_AddObject(m, "Example", temp) != 0) + } + if (PyModule_AddObject(m, "Example", temp) != 0) { goto fail; + } + /* Add an exception type */ temp = 
PyErr_NewException("_testimportexec.error", NULL, NULL); - if (temp == NULL) + if (temp == NULL) { goto fail; - if (PyModule_AddObject(m, "error", temp) != 0) + } + if (PyModule_AddObject(m, "error", temp) != 0) { goto fail; + } /* Add Str */ temp = PyType_FromSpec(&Str_Type_spec); - if (temp == NULL) + if (temp == NULL) { goto fail; - if (PyModule_AddObject(m, "Str", temp) != 0) + } + if (PyModule_AddObject(m, "Str", temp) != 0) { goto fail; + } - if (PyModule_AddIntConstant(m, "int_const", 1969) != 0) + if (PyModule_AddIntConstant(m, "int_const", 1969) != 0) { goto fail; + } - if (PyModule_AddStringConstant(m, "str_const", "something different") != 0) + if (PyModule_AddStringConstant(m, "str_const", "something different") != 0) { goto fail; + } return 0; fail: @@ -620,6 +791,54 @@ PyInit__testmultiphase_exec_unreported_exception(PyObject *spec) return PyModuleDef_Init(&def_exec_unreported_exception); } +static int +meth_state_access_exec(PyObject *m) +{ + PyObject *temp; + meth_state *m_state; + + m_state = PyModule_GetState(m); + if (m_state == NULL) { + return -1; + } + + temp = PyType_FromModuleAndSpec(m, &StateAccessType_spec, NULL); + if (temp == NULL) { + return -1; + } + if (PyModule_AddObject(m, "StateAccessType", temp) != 0) { + return -1; + } + + + return 0; +} + +static PyModuleDef_Slot meth_state_access_slots[] = { + {Py_mod_exec, meth_state_access_exec}, + {0, NULL} +}; + +static PyModuleDef def_meth_state_access = { + PyModuleDef_HEAD_INIT, /* m_base */ + "_testmultiphase_meth_state_access", /* m_name */ + PyDoc_STR("Module testing access" + " to state from methods."), + sizeof(meth_state), /* m_size */ + NULL, /* m_methods */ + meth_state_access_slots, /* m_slots */ + 0, /* m_traverse */ + 0, /* m_clear */ + 0, /* m_free */ +}; + +PyMODINIT_FUNC +PyInit__testmultiphase_meth_state_access(PyObject *spec) +{ + return PyModuleDef_Init(&def_meth_state_access); +} + + /*** Helper for imp test ***/ static PyModuleDef imp_dummy_def = 
TEST_MODULE_DEF("imp_dummy", main_slots, testexport_methods); diff --git a/Modules/clinic/_testmultiphase.c.h b/Modules/clinic/_testmultiphase.c.h new file mode 100644 index 0000000000000..0d38c230f7186 --- /dev/null +++ b/Modules/clinic/_testmultiphase.c.h @@ -0,0 +1,101 @@ +/*[clinic input] +preserve +[clinic start generated code]*/ + +PyDoc_STRVAR(_testmultiphase_StateAccessType_get_defining_module__doc__, +"get_defining_module($self, /)\n" +"--\n" +"\n" +"Return the module of the defining class."); + +#define _TESTMULTIPHASE_STATEACCESSTYPE_GET_DEFINING_MODULE_METHODDEF \ + {"get_defining_module", (PyCFunction)(void(*)(void))_testmultiphase_StateAccessType_get_defining_module, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _testmultiphase_StateAccessType_get_defining_module__doc__}, + +static PyObject * +_testmultiphase_StateAccessType_get_defining_module_impl(StateAccessTypeObject *self, + PyTypeObject *cls); + +static PyObject * +_testmultiphase_StateAccessType_get_defining_module(StateAccessTypeObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + static const char * const _keywords[] = { NULL}; + static _PyArg_Parser _parser = {":get_defining_module", _keywords, 0}; + + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser + )) { + goto exit; + } + return_value = _testmultiphase_StateAccessType_get_defining_module_impl(self, cls); + +exit: + return return_value; +} + +PyDoc_STRVAR(_testmultiphase_StateAccessType_increment_count_clinic__doc__, +"increment_count_clinic($self, /, n=1, *, twice=False)\n" +"--\n" +"\n" +"Add \'n\' from the module-state counter.\n" +"\n" +"Pass \'twice\' to double that amount.\n" +"\n" +"This tests Argument Clinic support for defining_class."); + +#define _TESTMULTIPHASE_STATEACCESSTYPE_INCREMENT_COUNT_CLINIC_METHODDEF \ + {"increment_count_clinic", (PyCFunction)(void(*)(void))_testmultiphase_StateAccessType_increment_count_clinic, 
METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _testmultiphase_StateAccessType_increment_count_clinic__doc__}, + +static PyObject * +_testmultiphase_StateAccessType_increment_count_clinic_impl(StateAccessTypeObject *self, + PyTypeObject *cls, + int n, int twice); + +static PyObject * +_testmultiphase_StateAccessType_increment_count_clinic(StateAccessTypeObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + static const char * const _keywords[] = {"n", "twice", NULL}; + static _PyArg_Parser _parser = {"|i$p:increment_count_clinic", _keywords, 0}; + int n = 1; + int twice = 0; + + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, + &n, &twice)) { + goto exit; + } + return_value = _testmultiphase_StateAccessType_increment_count_clinic_impl(self, cls, n, twice); + +exit: + return return_value; +} + +PyDoc_STRVAR(_testmultiphase_StateAccessType_get_count__doc__, +"get_count($self, /)\n" +"--\n" +"\n" +"Return the value of the module-state counter."); + +#define _TESTMULTIPHASE_STATEACCESSTYPE_GET_COUNT_METHODDEF \ + {"get_count", (PyCFunction)(void(*)(void))_testmultiphase_StateAccessType_get_count, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _testmultiphase_StateAccessType_get_count__doc__}, + +static PyObject * +_testmultiphase_StateAccessType_get_count_impl(StateAccessTypeObject *self, + PyTypeObject *cls); + +static PyObject * +_testmultiphase_StateAccessType_get_count(StateAccessTypeObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + static const char * const _keywords[] = { NULL}; + static _PyArg_Parser _parser = {":get_count", _keywords, 0}; + + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser + )) { + goto exit; + } + return_value = _testmultiphase_StateAccessType_get_count_impl(self, cls); + +exit: + return return_value; +} +/*[clinic end generated code: output=39eea487e94e7f5d 
input=a9049054013a1b77]*/ diff --git a/Objects/descrobject.c b/Objects/descrobject.c index 572baa5e312d2..c9754a11b89be 100644 --- a/Objects/descrobject.c +++ b/Objects/descrobject.c @@ -127,7 +127,11 @@ classmethod_get(PyMethodDescrObject *descr, PyObject *obj, PyObject *type) ((PyTypeObject *)type)->tp_name); return NULL; } - return PyCFunction_NewEx(descr->d_method, type, NULL); + PyTypeObject *cls = NULL; + if (descr->d_method->ml_flags & METH_METHOD) { + cls = descr->d_common.d_type; + } + return PyCMethod_New(descr->d_method, type, NULL, cls); } static PyObject * @@ -137,7 +141,19 @@ method_get(PyMethodDescrObject *descr, PyObject *obj, PyObject *type) if (descr_check((PyDescrObject *)descr, obj, &res)) return res; - return PyCFunction_NewEx(descr->d_method, obj, NULL); + if (descr->d_method->ml_flags & METH_METHOD) { + if (PyType_Check(type)) { + return PyCMethod_New(descr->d_method, obj, NULL, descr->d_common.d_type); + } else { + PyErr_Format(PyExc_TypeError, + "descriptor '%V' needs a type, not '%s', as arg 2", + descr_name((PyDescrObject *)descr), + Py_TYPE(type)->tp_name); + return NULL; + } + } else { + return PyCFunction_NewEx(descr->d_method, obj, NULL); + } } static PyObject * @@ -335,6 +351,27 @@ method_vectorcall_VARARGS_KEYWORDS( return result; } +static PyObject * +method_vectorcall_FASTCALL_KEYWORDS_METHOD( + PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) +{ + PyThreadState *tstate = _PyThreadState_GET(); + Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); + if (method_check_args(func, args, nargs, NULL)) { + return NULL; + } + NULL; + PyCMethod meth = (PyCMethod) method_enter_call(tstate, func); + if (meth == NULL) { + return NULL; + } + PyObject *result = meth(args[0], + ((PyMethodDescrObject *)func)->d_common.d_type, + args+1, nargs-1, kwnames); + Py_LeaveRecursiveCall(); + return result; +} + static PyObject * method_vectorcall_FASTCALL( PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) @@ 
-868,7 +905,8 @@ PyDescr_NewMethod(PyTypeObject *type, PyMethodDef *method) { /* Figure out correct vectorcall function to use */ vectorcallfunc vectorcall; - switch (method->ml_flags & (METH_VARARGS | METH_FASTCALL | METH_NOARGS | METH_O | METH_KEYWORDS)) + switch (method->ml_flags & (METH_VARARGS | METH_FASTCALL | METH_NOARGS | + METH_O | METH_KEYWORDS | METH_METHOD)) { case METH_VARARGS: vectorcall = method_vectorcall_VARARGS; @@ -888,6 +926,9 @@ PyDescr_NewMethod(PyTypeObject *type, PyMethodDef *method) case METH_O: vectorcall = method_vectorcall_O; break; + case METH_METHOD | METH_FASTCALL | METH_KEYWORDS: + vectorcall = method_vectorcall_FASTCALL_KEYWORDS_METHOD; + break; default: PyErr_Format(PyExc_SystemError, "%s() method: bad call flags", method->ml_name); diff --git a/Objects/methodobject.c b/Objects/methodobject.c index 20eba6fa8643b..5659f2143d182 100644 --- a/Objects/methodobject.c +++ b/Objects/methodobject.c @@ -10,12 +10,16 @@ /* undefine macro trampoline to PyCFunction_NewEx */ #undef PyCFunction_New +/* undefine macro trampoline to PyCMethod_New */ +#undef PyCFunction_NewEx /* Forward declarations */ static PyObject * cfunction_vectorcall_FASTCALL( PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); static PyObject * cfunction_vectorcall_FASTCALL_KEYWORDS( PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); +static PyObject * cfunction_vectorcall_FASTCALL_KEYWORDS_METHOD( + PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); static PyObject * cfunction_vectorcall_NOARGS( PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); static PyObject * cfunction_vectorcall_O( @@ -32,10 +36,17 @@ PyCFunction_New(PyMethodDef *ml, PyObject *self) PyObject * PyCFunction_NewEx(PyMethodDef *ml, PyObject *self, PyObject *module) +{ + return PyCMethod_New(ml, self, module, NULL); +} + +PyObject * +PyCMethod_New(PyMethodDef *ml, PyObject *self, PyObject *module, PyTypeObject *cls) 
{ /* Figure out correct vectorcall function to use */ vectorcallfunc vectorcall; - switch (ml->ml_flags & (METH_VARARGS | METH_FASTCALL | METH_NOARGS | METH_O | METH_KEYWORDS)) + switch (ml->ml_flags & (METH_VARARGS | METH_FASTCALL | METH_NOARGS | + METH_O | METH_KEYWORDS | METH_METHOD)) { case METH_VARARGS: case METH_VARARGS | METH_KEYWORDS: @@ -55,17 +66,44 @@ PyCFunction_NewEx(PyMethodDef *ml, PyObject *self, PyObject *module) case METH_O: vectorcall = cfunction_vectorcall_O; break; + case METH_METHOD | METH_FASTCALL | METH_KEYWORDS: + vectorcall = cfunction_vectorcall_FASTCALL_KEYWORDS_METHOD; + break; default: PyErr_Format(PyExc_SystemError, "%s() method: bad call flags", ml->ml_name); return NULL; } - PyCFunctionObject *op = - PyObject_GC_New(PyCFunctionObject, &PyCFunction_Type); - if (op == NULL) { - return NULL; + PyCFunctionObject *op = NULL; + + if (ml->ml_flags & METH_METHOD) { + if (!cls) { + PyErr_SetString(PyExc_SystemError, + "attempting to create PyCMethod with a METH_METHOD " + "flag but no class"); + return NULL; + } + PyCMethodObject *om = PyObject_GC_New(PyCMethodObject, &PyCMethod_Type); + if (om == NULL) { + return NULL; + } + Py_INCREF(cls); + om->mm_class = cls; + op = (PyCFunctionObject *)om; + } else { + if (cls) { + PyErr_SetString(PyExc_SystemError, + "attempting to create PyCFunction with class " + "but no METH_METHOD flag"); + return NULL; + } + op = PyObject_GC_New(PyCFunctionObject, &PyCFunction_Type); + if (op == NULL) { + return NULL; + } } + op->m_weakreflist = NULL; op->m_ml = ml; Py_XINCREF(self); @@ -107,6 +145,16 @@ PyCFunction_GetFlags(PyObject *op) return PyCFunction_GET_FLAGS(op); } +PyTypeObject * +PyCMethod_GetClass(PyObject *op) +{ + if (!PyCFunction_Check(op)) { + PyErr_BadInternalCall(); + return NULL; + } + return PyCFunction_GET_CLASS(op); +} + /* Methods (the standard built-in methods, that is) */ static void @@ -118,6 +166,7 @@ meth_dealloc(PyCFunctionObject *m) } Py_XDECREF(m->m_self); Py_XDECREF(m->m_module); + 
Py_XDECREF(PyCFunction_GET_CLASS(m)); PyObject_GC_Del(m); } @@ -196,6 +245,7 @@ meth_traverse(PyCFunctionObject *m, visitproc visit, void *arg) { Py_VISIT(m->m_self); Py_VISIT(m->m_module); + Py_VISIT(PyCFunction_GET_CLASS(m)); return 0; } @@ -314,6 +364,13 @@ PyTypeObject PyCFunction_Type = { 0, /* tp_dict */ }; +PyTypeObject PyCMethod_Type = { + PyVarObject_HEAD_INIT(&PyType_Type, 0) + .tp_name = "builtin_method", + .tp_basicsize = sizeof(PyCMethodObject), + .tp_base = &PyCFunction_Type, +}; + /* Vectorcall functions for each of the PyCFunction calling conventions, * except for METH_VARARGS (possibly combined with METH_KEYWORDS) which * doesn't use vectorcall. @@ -385,6 +442,22 @@ cfunction_vectorcall_FASTCALL_KEYWORDS( return result; } +static PyObject * +cfunction_vectorcall_FASTCALL_KEYWORDS_METHOD( + PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) +{ + PyThreadState *tstate = _PyThreadState_GET(); + PyTypeObject *cls = PyCFunction_GET_CLASS(func); + Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); + PyCMethod meth = (PyCMethod)cfunction_enter_call(tstate, func); + if (meth == NULL) { + return NULL; + } + PyObject *result = meth(PyCFunction_GET_SELF(func), cls, args, nargs, kwnames); + _Py_LeaveRecursiveCall(tstate); + return result; +} + static PyObject * cfunction_vectorcall_NOARGS( PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) diff --git a/Objects/object.c b/Objects/object.c index 75ea92ad9005c..623ee52eb1e22 100644 --- a/Objects/object.c +++ b/Objects/object.c @@ -1789,6 +1789,7 @@ _PyTypes_Init(void) INIT_TYPE(&PyCode_Type, "code"); INIT_TYPE(&PyFrame_Type, "frame"); INIT_TYPE(&PyCFunction_Type, "builtin function"); + INIT_TYPE(&PyCMethod_Type, "builtin method"); INIT_TYPE(&PyMethod_Type, "method"); INIT_TYPE(&PyFunction_Type, "function"); INIT_TYPE(&PyDictProxy_Type, "dict proxy"); diff --git a/Objects/typeobject.c b/Objects/typeobject.c index 0d5600b4ce4fa..525f5ac5d5775 100644 --- a/Objects/typeobject.c 
+++ b/Objects/typeobject.c @@ -2708,6 +2708,9 @@ type_new(PyTypeObject *metatype, PyObject *args, PyObject *kwds) if (qualname != NULL && _PyDict_DelItemId(dict, &PyId___qualname__) < 0) goto error; + /* Set ht_module */ + et->ht_module = NULL; + /* Set tp_doc to a copy of dict['__doc__'], if the latter is there and is a string. The __doc__ accessor will first look for tp_doc; if that fails, it will still look into __dict__. @@ -2939,6 +2942,12 @@ PyType_FromSpec_tp_traverse(PyObject *self, visitproc visit, void *arg) PyObject * PyType_FromSpecWithBases(PyType_Spec *spec, PyObject *bases) +{ + return PyType_FromModuleAndSpec(NULL, spec, bases); +} + +PyObject * +PyType_FromModuleAndSpec(PyObject *module, PyType_Spec *spec, PyObject *bases) { PyHeapTypeObject *res; PyObject *modname; @@ -2998,6 +3007,9 @@ PyType_FromSpecWithBases(PyType_Spec *spec, PyObject *bases) Py_INCREF(res->ht_qualname); type->tp_name = spec->name; + Py_XINCREF(module); + res->ht_module = module; + /* Adjust for empty tuple bases */ if (!bases) { base = &PyBaseObject_Type; @@ -3176,6 +3188,40 @@ PyType_GetSlot(PyTypeObject *type, int slot) return *(void**)(((char*)type) + slotoffsets[slot]); } +PyObject * +PyType_GetModule(PyTypeObject *type) +{ + assert(PyType_Check(type)); + if (!_PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE)) { + PyErr_Format( + PyExc_TypeError, + "PyType_GetModule: Type '%s' is not a heap type", + type->tp_name); + return NULL; + } + + PyHeapTypeObject* et = (PyHeapTypeObject*)type; + if (!et->ht_module) { + PyErr_Format( + PyExc_TypeError, + "PyType_GetModule: Type '%s' has no associated module", + type->tp_name); + return NULL; + } + return et->ht_module; + +} + +void * +PyType_GetModuleState(PyTypeObject *type) +{ + PyObject *m = PyType_GetModule(type); + if (m == NULL) { + return NULL; + } + return PyModule_GetState(m); +} + /* Internal API to look for a name through the MRO, bypassing the method cache. This returns a borrowed reference, and might set an exception. 
'error' is set to: -1: error with exception; 1: error without exception; 0: ok */ @@ -3503,8 +3549,10 @@ type_dealloc(PyTypeObject *type) Py_XDECREF(et->ht_name); Py_XDECREF(et->ht_qualname); Py_XDECREF(et->ht_slots); - if (et->ht_cached_keys) + if (et->ht_cached_keys) { _PyDictKeys_DecRef(et->ht_cached_keys); + } + Py_XDECREF(et->ht_module); Py_TYPE(type)->tp_free((PyObject *)type); } @@ -3694,6 +3742,7 @@ type_traverse(PyTypeObject *type, visitproc visit, void *arg) Py_VISIT(type->tp_mro); Py_VISIT(type->tp_bases); Py_VISIT(type->tp_base); + Py_VISIT(((PyHeapTypeObject *)type)->ht_module); /* There's no need to visit type->tp_subclasses or ((PyHeapTypeObject *)type)->ht_slots, because they can't be involved @@ -3715,10 +3764,13 @@ type_clear(PyTypeObject *type) the dict, so that other objects caught in a reference cycle don't start calling destroyed methods. - Otherwise, the only field we need to clear is tp_mro, which is + Otherwise, the we need to clear tp_mro, which is part of a hard cycle (its first element is the class itself) that won't be broken otherwise (it's a tuple and tuples don't have a - tp_clear handler). None of the other fields need to be + tp_clear handler). + We also need to clear ht_module, if present: the module usually holds a + reference to its class. 
None of the other fields need to be + cleared, and here's why: tp_cache: @@ -3743,8 +3795,11 @@ type_clear(PyTypeObject *type) ((PyHeapTypeObject *)type)->ht_cached_keys = NULL; _PyDictKeys_DecRef(cached_keys); } - if (type->tp_dict) + if (type->tp_dict) { PyDict_Clear(type->tp_dict); + } + Py_CLEAR(((PyHeapTypeObject *)type)->ht_module); + Py_CLEAR(type->tp_mro); return 0; diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj index 21b51bf5e6ddc..73274ac9acf55 100644 --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -138,6 +138,7 @@ + diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters index f5c76fa34eb94..254c8fbbea5fb 100644 --- a/PCbuild/pythoncore.vcxproj.filters +++ b/PCbuild/pythoncore.vcxproj.filters @@ -111,6 +111,9 @@ Include + + Include + Include diff --git a/Tools/clinic/clinic.py b/Tools/clinic/clinic.py index 382e29a28ab48..281a749a935cc 100755 --- a/Tools/clinic/clinic.py +++ b/Tools/clinic/clinic.py @@ -657,9 +657,14 @@ def output_templates(self, f): if not p.is_optional(): min_pos = i + requires_defining_class = any( + isinstance(p.converter, defining_class_converter) + for p in parameters) + meth_o = (len(parameters) == 1 and parameters[0].is_positional_only() and not converters[0].is_optional() and + not requires_defining_class and not new_or_init) # we have to set these things before we're done: @@ -717,6 +722,11 @@ def output_templates(self, f): {c_basename}({self_type}{self_name}, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) """) + parser_prototype_def_class = normalize_snippet(""" + static PyObject * + {c_basename}({self_type}{self_name}, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) + """) + # parser_body_fields remembers the fields passed in to the # previous call to parser_body. this is used for an awful hack. 
parser_body_fields = () @@ -824,7 +834,7 @@ def parser_body(prototype, *fields, declarations=''): parser_definition = parser_body(parser_prototype, ' {option_group_parsing}') - elif pos_only == len(parameters): + elif not requires_defining_class and pos_only == len(parameters): if not new_or_init: # positional-only, but no option groups # we only need one call to _PyArg_ParseStack @@ -891,7 +901,7 @@ def parser_body(prototype, *fields, declarations=''): parser_prototype = parser_prototype_fastcall_keywords argname_fmt = 'args[%d]' declarations = normalize_snippet(""" - static const char * const _keywords[] = {{{keywords}, NULL}}; + static const char * const _keywords[] = {{{keywords} NULL}}; static _PyArg_Parser _parser = {{NULL, _keywords, "{name}", 0}}; PyObject *argsbuf[%s]; """ % len(converters)) @@ -909,7 +919,7 @@ def parser_body(prototype, *fields, declarations=''): parser_prototype = parser_prototype_keyword argname_fmt = 'fastargs[%d]' declarations = normalize_snippet(""" - static const char * const _keywords[] = {{{keywords}, NULL}}; + static const char * const _keywords[] = {{{keywords} NULL}}; static _PyArg_Parser _parser = {{NULL, _keywords, "{name}", 0}}; PyObject *argsbuf[%s]; PyObject * const *fastargs; @@ -923,6 +933,9 @@ def parser_body(prototype, *fields, declarations=''): goto exit; }} """ % (min_pos, max_pos, min_kw_only), indent=4)] + if requires_defining_class: + flags = 'METH_METHOD|' + flags + parser_prototype = parser_prototype_def_class add_label = None for i, p in enumerate(parameters): @@ -983,11 +996,11 @@ def parser_body(prototype, *fields, declarations=''): parser_code.append("%s:" % add_label) else: declarations = ( - 'static const char * const _keywords[] = {{{keywords}, NULL}};\n' + 'static const char * const _keywords[] = {{{keywords} NULL}};\n' 'static _PyArg_Parser _parser = {{"{format_units}:{name}", _keywords, 0}};') if not new_or_init: parser_code = [normalize_snippet(""" - if (!_PyArg_ParseStackAndKeywords(args, nargs, 
kwnames, &_parser, + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser{parse_arguments_comma} {parse_arguments})) {{ goto exit; }} @@ -1021,6 +1034,9 @@ def parser_body(prototype, *fields, declarations=''): if parses_keywords: assert parses_positional + if requires_defining_class: + raise ValueError("Slot methods cannot access their defining class.") + if not parses_keywords: fields.insert(0, normalize_snippet(""" if ({self_type_check}!_PyArg_NoKeywords("{name}", kwargs)) {{ @@ -1297,9 +1313,13 @@ def render_function(self, clinic, f): template_dict['declarations'] = format_escape("\n".join(data.declarations)) template_dict['initializers'] = "\n\n".join(data.initializers) template_dict['modifications'] = '\n\n'.join(data.modifications) - template_dict['keywords'] = '"' + '", "'.join(data.keywords) + '"' + template_dict['keywords'] = ' '.join('"' + k + '",' for k in data.keywords) template_dict['format_units'] = ''.join(data.format_units) template_dict['parse_arguments'] = ', '.join(data.parse_arguments) + if data.parse_arguments: + template_dict['parse_arguments_comma'] = ','; + else: + template_dict['parse_arguments_comma'] = ''; template_dict['impl_parameters'] = ", ".join(data.impl_parameters) template_dict['impl_arguments'] = ", ".join(data.impl_arguments) template_dict['return_conversion'] = format_escape("".join(data.return_conversion).rstrip()) @@ -2730,6 +2750,25 @@ def parse_arg(self, argname, displayname): """.format(argname=argname, paramname=self.name) return super().parse_arg(argname, displayname) +class defining_class_converter(CConverter): + """ + A special-case converter: + this is the default converter used for the defining class. 
+ """ + type = 'PyTypeObject *' + format_unit = '' + show_in_signature = False + + def converter_init(self, *, type=None): + self.specified_type = type + + def render(self, parameter, data): + self._render_self(parameter, data) + + def set_template_dict(self, template_dict): + template_dict['defining_class_name'] = self.name + + class char_converter(CConverter): type = 'char' default_type = (bytes, bytearray) @@ -4508,6 +4547,19 @@ def bad_node(self, node): else: fail("A 'self' parameter, if specified, must be the very first thing in the parameter block.") + if isinstance(converter, defining_class_converter): + _lp = len(self.function.parameters) + if _lp == 1: + if (self.parameter_state != self.ps_required): + fail("A 'defining_class' parameter cannot be marked optional.") + if value is not unspecified: + fail("A 'defining_class' parameter cannot have a default value.") + if self.group: + fail("A 'defining_class' parameter cannot be in an optional group.") + else: + fail("A 'defining_class' parameter, if specified, must either be the first thing in the parameter block, or come just after 'self'.") + + p = Parameter(parameter_name, kind, function=self.function, converter=converter, default=value, group=self.group) if parameter_name in self.function.parameters: From webhook-mailer at python.org Thu May 7 09:42:41 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 07 May 2020 13:42:41 -0000 Subject: [Python-checkins] bpo-40545: Export _PyErr_GetTopmostException() function (GH-19978) Message-ID: https://github.com/python/cpython/commit/8963a7f1f84a05412178b56629508b660d38861b commit: 8963a7f1f84a05412178b56629508b660d38861b branch: master author: Victor Stinner committer: GitHub date: 2020-05-07T15:42:33+02:00 summary: bpo-40545: Export _PyErr_GetTopmostException() function (GH-19978) Declare _PyErr_GetTopmostException() with PyAPI_FUNC() to properly export the function in the C API. The function remains private ("_Py") prefix. 
Co-Authored-By: Julien Danjou files: A Misc/NEWS.d/next/C API/2020-05-07-11-41-13.bpo-40545.51DzF1.rst M Include/cpython/pyerrors.h diff --git a/Include/cpython/pyerrors.h b/Include/cpython/pyerrors.h index cdd052026c1ba..dd3c2caa0cc04 100644 --- a/Include/cpython/pyerrors.h +++ b/Include/cpython/pyerrors.h @@ -75,7 +75,7 @@ typedef PyOSErrorObject PyWindowsErrorObject; /* Error handling definitions */ PyAPI_FUNC(void) _PyErr_SetKeyError(PyObject *); -_PyErr_StackItem *_PyErr_GetTopmostException(PyThreadState *tstate); +PyAPI_FUNC(_PyErr_StackItem*) _PyErr_GetTopmostException(PyThreadState *tstate); PyAPI_FUNC(void) _PyErr_GetExcInfo(PyThreadState *, PyObject **, PyObject **, PyObject **); /* Context manipulation (PEP 3134) */ diff --git a/Misc/NEWS.d/next/C API/2020-05-07-11-41-13.bpo-40545.51DzF1.rst b/Misc/NEWS.d/next/C API/2020-05-07-11-41-13.bpo-40545.51DzF1.rst new file mode 100644 index 0000000000000..d7f256a2a6b52 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2020-05-07-11-41-13.bpo-40545.51DzF1.rst @@ -0,0 +1,3 @@ +Declare ``_PyErr_GetTopmostException()`` with ``PyAPI_FUNC()`` to properly +export the function in the C API. The function remains private (``_Py``) +prefix. From webhook-mailer at python.org Thu May 7 10:56:09 2020 From: webhook-mailer at python.org (Eric Snow) Date: Thu, 07 May 2020 14:56:09 -0000 Subject: [Python-checkins] bpo-32604: [_xxsubinterpreters] Propagate exceptions. (GH-19768) Message-ID: https://github.com/python/cpython/commit/a1d9e0accd33af1d8e90fc48b34c13d7b07dcf57 commit: a1d9e0accd33af1d8e90fc48b34c13d7b07dcf57 branch: master author: Eric Snow committer: GitHub date: 2020-05-07T08:56:01-06:00 summary: bpo-32604: [_xxsubinterpreters] Propagate exceptions. (GH-19768) (Note: PEP 554 is not accepted and the implementation in the code base is a private one for use in the test suite.) If code running in a subinterpreter raises an uncaught exception then the "run" call in the calling interpreter fails. 
A RunFailedError is raised there that summarizes the original exception as a string. The actual exception type, __cause__, __context__, state, etc. are all discarded. This turned out to be functionally insufficient in practice. There is a more helpful solution (and PEP 554 has been updated appropriately). This change adds the exception propagation behavior described in PEP 554 to the _xxsubinterpreters module. With this change a copy of the original exception is set to __cause__ on the RunFailedError. For now we are using "pickle", which preserves the exception's state. We also preserve the original __cause__, __context__, and __traceback__ (since "pickle" does not preserve those). https://bugs.python.org/issue32604 files: M Lib/test/test__xxsubinterpreters.py M Modules/_xxsubinterpretersmodule.c diff --git a/Lib/test/test__xxsubinterpreters.py b/Lib/test/test__xxsubinterpreters.py index e17bfde2c2f75..039c040ad3950 100644 --- a/Lib/test/test__xxsubinterpreters.py +++ b/Lib/test/test__xxsubinterpreters.py @@ -1,3 +1,4 @@ +import builtins from collections import namedtuple import contextlib import itertools @@ -866,10 +867,11 @@ def assert_run_failed(self, exctype, msg=None): yield if msg is None: self.assertEqual(str(caught.exception).split(':')[0], - str(exctype)) + exctype.__name__) else: self.assertEqual(str(caught.exception), - "{}: {}".format(exctype, msg)) + "{}: {}".format(exctype.__name__, msg)) + self.assertIsInstance(caught.exception.__cause__, exctype) def test_invalid_syntax(self): with self.assert_run_failed(SyntaxError): @@ -1060,6 +1062,301 @@ def f(): self.assertEqual(retcode, 0) +def build_exception(exctype, /, *args, **kwargs): + # XXX Use __qualname__? 
+ name = exctype.__name__ + argreprs = [repr(a) for a in args] + if kwargs: + kwargreprs = [f'{k}={v!r}' for k, v in kwargs.items()] + script = f'{name}({", ".join(argreprs)}, {", ".join(kwargreprs)})' + else: + script = f'{name}({", ".join(argreprs)})' + expected = exctype(*args, **kwargs) + return script, expected + + +def build_exceptions(self, *exctypes, default=None, custom=None, bases=True): + if not exctypes: + raise NotImplementedError + if not default: + default = ((), {}) + elif isinstance(default, str): + default = ((default,), {}) + elif type(default) is not tuple: + raise NotImplementedError + elif len(default) != 2: + default = (default, {}) + elif type(default[0]) is not tuple: + default = (default, {}) + elif type(default[1]) is not dict: + default = (default, {}) + # else leave it alone + + for exctype in exctypes: + customtype = None + values = default + if custom: + if exctype in custom: + customtype = exctype + elif bases: + for customtype in custom: + if issubclass(exctype, customtype): + break + else: + customtype = None + if customtype is not None: + values = custom[customtype] + if values is None: + continue + args, kwargs = values + script, expected = build_exception(exctype, *args, **kwargs) + yield exctype, customtype, script, expected + + +try: + raise Exception +except Exception as exc: + assert exc.__traceback__ is not None + Traceback = type(exc.__traceback__) + + +class RunFailedTests(TestBase): + + BUILTINS = [v + for v in vars(builtins).values() + if (type(v) is type + and issubclass(v, Exception) + #and issubclass(v, BaseException) + ) + ] + BUILTINS_SPECIAL = [ + # These all have extra attributes (i.e. args/kwargs) + SyntaxError, + ImportError, + UnicodeError, + OSError, + SystemExit, + StopIteration, + ] + + @classmethod + def build_exceptions(cls, exctypes=None, default=(), custom=None): + if exctypes is None: + exctypes = cls.BUILTINS + if custom is None: + # Skip the "special" ones. 
+ custom = {et: None for et in cls.BUILTINS_SPECIAL} + yield from build_exceptions(*exctypes, default=default, custom=custom) + + def assertExceptionsEqual(self, exc, expected, *, chained=True): + if type(expected) is type: + self.assertIs(type(exc), expected) + return + elif not isinstance(exc, Exception): + self.assertEqual(exc, expected) + elif not isinstance(expected, Exception): + self.assertEqual(exc, expected) + else: + # Plain equality doesn't work, so we have to compare manually. + self.assertIs(type(exc), type(expected)) + self.assertEqual(exc.args, expected.args) + self.assertEqual(exc.__reduce__(), expected.__reduce__()) + if chained: + self.assertExceptionsEqual(exc.__context__, + expected.__context__) + self.assertExceptionsEqual(exc.__cause__, + expected.__cause__) + self.assertEqual(exc.__suppress_context__, + expected.__suppress_context__) + + def assertTracebacksEqual(self, tb, expected): + if not isinstance(tb, Traceback): + self.assertEqual(tb, expected) + elif not isinstance(expected, Traceback): + self.assertEqual(tb, expected) + else: + self.assertEqual(tb.tb_frame.f_code.co_name, + expected.tb_frame.f_code.co_name) + self.assertEqual(tb.tb_frame.f_code.co_filename, + expected.tb_frame.f_code.co_filename) + self.assertEqual(tb.tb_lineno, expected.tb_lineno) + self.assertTracebacksEqual(tb.tb_next, expected.tb_next) + + # XXX Move this to TestBase? 
+ @contextlib.contextmanager + def expected_run_failure(self, expected): + exctype = expected if type(expected) is type else type(expected) + + with self.assertRaises(interpreters.RunFailedError) as caught: + yield caught + exc = caught.exception + + modname = exctype.__module__ + if modname == 'builtins' or modname == '__main__': + exctypename = exctype.__name__ + else: + exctypename = f'{modname}.{exctype.__name__}' + if exctype is expected: + self.assertEqual(str(exc).split(':')[0], exctypename) + else: + self.assertEqual(str(exc), f'{exctypename}: {expected}') + self.assertExceptionsEqual(exc.__cause__, expected) + if exc.__cause__ is not None: + self.assertIsNotNone(exc.__cause__.__traceback__) + + def test_builtin_exceptions(self): + interpid = interpreters.create() + msg = '' + for i, info in enumerate(self.build_exceptions( + default=msg, + custom={ + SyntaxError: ((msg, '', 1, 3, 'a +?'), {}), + ImportError: ((msg,), {'name': 'spam', 'path': '/x/spam.py'}), + UnicodeError: None, + #UnicodeError: ((), {}), + #OSError: ((), {}), + SystemExit: ((1,), {}), + StopIteration: (('',), {}), + }, + )): + exctype, _, script, expected = info + testname = f'{i+1} - {script}' + script = f'raise {script}' + + with self.subTest(testname): + with self.expected_run_failure(expected): + interpreters.run_string(interpid, script) + + def test_custom_exception_from___main__(self): + script = dedent(""" + class SpamError(Exception): + def __init__(self, q): + super().__init__(f'got {q}') + self.q = q + raise SpamError('eggs') + """) + expected = Exception(f'SpamError: got {"eggs"}') + + interpid = interpreters.create() + with self.assertRaises(interpreters.RunFailedError) as caught: + interpreters.run_string(interpid, script) + cause = caught.exception.__cause__ + + self.assertExceptionsEqual(cause, expected) + + class SpamError(Exception): + # The normal Exception.__reduce__() produces a funny result + # here. So we have to use a custom __new__(). 
+ def __new__(cls, q): + if type(q) is SpamError: + return q + return super().__new__(cls, q) + def __init__(self, q): + super().__init__(f'got {q}') + self.q = q + + def test_custom_exception(self): + script = dedent(""" + import test.test__xxsubinterpreters + SpamError = test.test__xxsubinterpreters.RunFailedTests.SpamError + raise SpamError('eggs') + """) + try: + ns = {} + exec(script, ns, ns) + except Exception as exc: + expected = exc + + interpid = interpreters.create() + with self.expected_run_failure(expected): + interpreters.run_string(interpid, script) + + class SpamReducedError(Exception): + def __init__(self, q): + super().__init__(f'got {q}') + self.q = q + def __reduce__(self): + return (type(self), (self.q,), {}) + + def test_custom___reduce__(self): + script = dedent(""" + import test.test__xxsubinterpreters + SpamError = test.test__xxsubinterpreters.RunFailedTests.SpamReducedError + raise SpamError('eggs') + """) + try: + exec(script, (ns := {'__name__': '__main__'}), ns) + except Exception as exc: + expected = exc + + interpid = interpreters.create() + with self.expected_run_failure(expected): + interpreters.run_string(interpid, script) + + def test_traceback_propagated(self): + script = dedent(""" + def do_spam(): + raise Exception('uh-oh') + def do_eggs(): + return do_spam() + class Spam: + def do(self): + return do_eggs() + def get_handler(): + def handler(): + return Spam().do() + return handler + go = (lambda: get_handler()()) + def iter_all(): + yield from (go() for _ in [True]) + yield None + def main(): + for v in iter_all(): + pass + main() + """) + try: + ns = {} + exec(script, ns, ns) + except Exception as exc: + expected = exc + expectedtb = exc.__traceback__.tb_next + + interpid = interpreters.create() + with self.expected_run_failure(expected) as caught: + interpreters.run_string(interpid, script) + exc = caught.exception + + self.assertTracebacksEqual(exc.__cause__.__traceback__, + expectedtb) + + def test_chained_exceptions(self): 
+ script = dedent(""" + try: + raise ValueError('msg 1') + except Exception as exc1: + try: + raise TypeError('msg 2') + except Exception as exc2: + try: + raise IndexError('msg 3') from exc2 + except Exception: + raise AttributeError('msg 4') + """) + try: + exec(script, {}, {}) + except Exception as exc: + expected = exc + + interpid = interpreters.create() + with self.expected_run_failure(expected) as caught: + interpreters.run_string(interpid, script) + exc = caught.exception + + # ...just to be sure. + self.assertIs(type(exc.__cause__), AttributeError) + + ################################## # channel tests diff --git a/Modules/_xxsubinterpretersmodule.c b/Modules/_xxsubinterpretersmodule.c index 8a6fce9e0b4bd..9c5df16e156a1 100644 --- a/Modules/_xxsubinterpretersmodule.c +++ b/Modules/_xxsubinterpretersmodule.c @@ -1,5 +1,4 @@ - -/* interpreters module */ +/* _interpreters module */ /* low-level access to interpreter primitives */ #include "Python.h" @@ -7,35 +6,921 @@ #include "interpreteridobject.h" +// XXX Emit a warning? +#define IGNORE_FAILURE(msg) \ + fprintf(stderr, " -----\nRunFailedError: %s\n", msg); \ + PyErr_PrintEx(0); \ + fprintf(stderr, " -----\n"); \ + PyErr_Clear(); + +typedef void (*_deallocfunc)(void *); + +static PyInterpreterState * +_get_current(void) +{ + // _PyInterpreterState_Get() aborts if lookup fails, so don't need + // to check the result for NULL. + return _PyInterpreterState_Get(); +} + + +/* string utils *************************************************************/ + +// PyMem_Free() must be used to deallocate the resulting string.
static char * -_copy_raw_string(PyObject *strobj) +_strdup_and_size(const char *data, Py_ssize_t *psize, _deallocfunc *dealloc) { - const char *str = PyUnicode_AsUTF8(strobj); - if (str == NULL) { - return NULL; + if (data == NULL) { + if (psize != NULL) { + *psize = 0; + } + if (dealloc != NULL) { + *dealloc = NULL; + } + return ""; + } + + Py_ssize_t size; + if (psize == NULL) { + size = strlen(data); + } else { + size = *psize; + if (size == 0) { + size = strlen(data); + *psize = size; // The size "return" value. + } } - char *copied = PyMem_Malloc(strlen(str)+1); + char *copied = PyMem_Malloc(size+1); if (copied == NULL) { PyErr_NoMemory(); return NULL; } - strcpy(copied, str); + if (dealloc != NULL) { + *dealloc = PyMem_Free; + } + memcpy(copied, data, size+1); return copied; } -static PyInterpreterState * -_get_current(void) +static const char * +_pyobj_get_str_and_size(PyObject *obj, Py_ssize_t *psize) { - // PyInterpreterState_Get() aborts if lookup fails, so don't need - // to check the result for NULL. 
- return PyInterpreterState_Get(); + if (PyUnicode_Check(obj)) { + return PyUnicode_AsUTF8AndSize(obj, psize); + } else { + const char *data = NULL; + PyBytes_AsStringAndSize(obj, (char **)&data, psize); + return data; + } +} + +/* "raw" strings */ + +typedef struct _rawstring { + Py_ssize_t size; + const char *data; + _deallocfunc dealloc; +} _rawstring; + +static void +_rawstring_init(_rawstring *raw) +{ + raw->size = 0; + raw->data = NULL; + raw->dealloc = NULL; +} + +static _rawstring * +_rawstring_new(void) +{ + _rawstring *raw = PyMem_NEW(_rawstring, 1); + if (raw == NULL) { + PyErr_NoMemory(); + return NULL; + } + _rawstring_init(raw); + return raw; +} + +static void +_rawstring_clear(_rawstring *raw) +{ + if (raw->data != NULL && raw->dealloc != NULL) { + (*raw->dealloc)((void *)raw->data); + } + _rawstring_init(raw); +} + +static void +_rawstring_free(_rawstring *raw) +{ + _rawstring_clear(raw); + PyMem_Free(raw); +} + +static int +_rawstring_is_clear(_rawstring *raw) +{ + return raw->size == 0 && raw->data == NULL && raw->dealloc == NULL; +} + +//static void +//_rawstring_move(_rawstring *raw, _rawstring *src) +//{ +// raw->size = src->size; +// raw->data = src->data; +// raw->dealloc = src->dealloc; +// _rawstring_init(src); +//} + +static void +_rawstring_proxy(_rawstring *raw, const char *str) +{ + if (str == NULL) { + str = ""; + } + raw->size = strlen(str); + raw->data = str; + raw->dealloc = NULL; +} + +static int +_rawstring_buffer(_rawstring *raw, Py_ssize_t size) +{ + raw->data = PyMem_Malloc(size+1); + if (raw->data == NULL) { + PyErr_NoMemory(); + return -1; + } + raw->size = size; + raw->dealloc = PyMem_Free; + return 0; +} + +static int +_rawstring_strcpy(_rawstring *raw, const char *str, Py_ssize_t size) +{ + _deallocfunc dealloc = NULL; + const char *copied = _strdup_and_size(str, &size, &dealloc); + if (copied == NULL) { + return -1; + } + + raw->size = size; + raw->dealloc = dealloc; + raw->data = copied; + return 0; +} + +static int 
+_rawstring_from_pyobj(_rawstring *raw, PyObject *obj) +{ + Py_ssize_t size = 0; + const char *data = _pyobj_get_str_and_size(obj, &size); + if (PyErr_Occurred()) { + return -1; + } + if (_rawstring_strcpy(raw, data, size) != 0) { + return -1; + } + return 0; +} + +static int +_rawstring_from_pyobj_attr(_rawstring *raw, PyObject *obj, const char *attr) +{ + int res = -1; + PyObject *valueobj = PyObject_GetAttrString(obj, attr); + if (valueobj == NULL) { + goto done; + } + if (!PyUnicode_Check(valueobj)) { + // XXX PyObject_Str()? Repr()? + goto done; + } + const char *valuestr = PyUnicode_AsUTF8(valueobj); + if (valuestr == NULL) { + if (PyErr_Occurred()) { + goto done; + } + } else if (_rawstring_strcpy(raw, valuestr, 0) != 0) { + _rawstring_clear(raw); + goto done; + } + res = 0; + +done: + Py_XDECREF(valueobj); + return res; +} + +static PyObject * +_rawstring_as_pybytes(_rawstring *raw) +{ + return PyBytes_FromStringAndSize(raw->data, raw->size); +} + + +/* object utils *************************************************************/ + +static void +_pyobj_identify_type(PyObject *obj, _rawstring *modname, _rawstring *clsname) +{ + PyObject *objtype = (PyObject *)Py_TYPE(obj); + + // Try __module__ and __name__. + if (_rawstring_from_pyobj_attr(modname, objtype, "__module__") != 0) { + // Fall back to the previous values in "modname". + IGNORE_FAILURE("bad __module__"); + } + if (_rawstring_from_pyobj_attr(clsname, objtype, "__name__") != 0) { + // Fall back to the previous values in "clsname". + IGNORE_FAILURE("bad __name__"); + } + + // XXX Fall back to __qualname__? + // XXX Fall back to tp_name? 
+} + +static PyObject * +_pyobj_get_class(const char *modname, const char *clsname) +{ + assert(clsname != NULL); + if (modname == NULL) { + modname = "builtins"; + } + + PyObject *module = PyImport_ImportModule(modname); + if (module == NULL) { + return NULL; + } + PyObject *cls = PyObject_GetAttrString(module, clsname); + Py_DECREF(module); + return cls; +} + +static PyObject * +_pyobj_create(const char *modname, const char *clsname, PyObject *arg) +{ + PyObject *cls = _pyobj_get_class(modname, clsname); + if (cls == NULL) { + return NULL; + } + PyObject *obj = NULL; + if (arg == NULL) { + obj = _PyObject_CallNoArg(cls); + } else { + obj = PyObject_CallFunction(cls, "O", arg); + } + Py_DECREF(cls); + return obj; +} + + +/* object snapshots */ + +typedef struct _objsnapshot { + // If modname is NULL then try "builtins" and "__main__". + _rawstring modname; + // clsname is required. + _rawstring clsname; + + // The rest are optional. + + // The serialized exception. + _rawstring *serialized; +} _objsnapshot; + +static void +_objsnapshot_init(_objsnapshot *osn) +{ + _rawstring_init(&osn->modname); + _rawstring_init(&osn->clsname); + osn->serialized = NULL; +} + +//static _objsnapshot * +//_objsnapshot_new(void) +//{ +// _objsnapshot *osn = PyMem_NEW(_objsnapshot, 1); +// if (osn == NULL) { +// PyErr_NoMemory(); +// return NULL; +// } +// _objsnapshot_init(osn); +// return osn; +//} + +static void +_objsnapshot_clear(_objsnapshot *osn) +{ + _rawstring_clear(&osn->modname); + _rawstring_clear(&osn->clsname); + if (osn->serialized != NULL) { + _rawstring_free(osn->serialized); + osn->serialized = NULL; + } +} + +//static void +//_objsnapshot_free(_objsnapshot *osn) +//{ +// _objsnapshot_clear(osn); +// PyMem_Free(osn); +//} + +static int +_objsnapshot_is_clear(_objsnapshot *osn) +{ + return osn->serialized == NULL + && _rawstring_is_clear(&osn->modname) + && _rawstring_is_clear(&osn->clsname); +} + +static void +_objsnapshot_summarize(_objsnapshot *osn, _rawstring 
*rawbuf, const char *msg) +{ + if (msg == NULL || *msg == '\0') { + // XXX Keep it NULL? + // XXX Keep it an empty string? + // XXX Use something more informative? + msg = ""; + } + const char *clsname = osn->clsname.data; + const char *modname = osn->modname.data; + if (modname && *modname == '\0') { + modname = NULL; + } + + // Prep the buffer. + Py_ssize_t size = strlen(clsname); + if (modname != NULL) { + if (strcmp(modname, "builtins") == 0) { + modname = NULL; + } else if (strcmp(modname, "__main__") == 0) { + modname = NULL; + } else { + size += strlen(modname) + 1; + } + } + if (msg != NULL) { + size += strlen(": ") + strlen(msg); + } + if (modname != NULL || msg != NULL) { + if (_rawstring_buffer(rawbuf, size) != 0) { + IGNORE_FAILURE("could not summarize object snapshot"); + return; + } + } + // ...else we'll proxy clsname as-is, so no need to allocate a buffer. + + // XXX Use __qualname__ somehow? + char *buf = (char *)rawbuf->data; + if (modname != NULL) { + if (msg != NULL) { + snprintf(buf, size+1, "%s.%s: %s", modname, clsname, msg); + } else { + snprintf(buf, size+1, "%s.%s", modname, clsname); + } + } else if (msg != NULL) { + snprintf(buf, size+1, "%s: %s", clsname, msg); + } else { + _rawstring_proxy(rawbuf, clsname); + } +} + +static _rawstring * +_objsnapshot_get_minimal_summary(_objsnapshot *osn, PyObject *obj) +{ + const char *str = NULL; + PyObject *objstr = PyObject_Str(obj); + if (objstr == NULL) { + PyErr_Clear(); + } else { + str = PyUnicode_AsUTF8(objstr); + if (str == NULL) { + PyErr_Clear(); + } + } + + _rawstring *summary = _rawstring_new(); + if (summary == NULL) { + return NULL; + } + _objsnapshot_summarize(osn, summary, str); + return summary; +} + +static void +_objsnapshot_extract(_objsnapshot *osn, PyObject *obj) +{ + assert(_objsnapshot_is_clear(osn)); + + // Get the "qualname". 
+ _rawstring_proxy(&osn->modname, ""); + _rawstring_proxy(&osn->clsname, ""); + _pyobj_identify_type(obj, &osn->modname, &osn->clsname); + + // Serialize the object. + // XXX Use marshal? + PyObject *pickle = PyImport_ImportModule("pickle"); + if (pickle == NULL) { + IGNORE_FAILURE("could not serialize object: pickle import failed"); + return; + } + PyObject *objdata = PyObject_CallMethod(pickle, "dumps", "(O)", obj); + Py_DECREF(pickle); + if (objdata == NULL) { + IGNORE_FAILURE("could not serialize object: pickle.dumps failed"); + } else { + _rawstring *serialized = _rawstring_new(); + int res = _rawstring_from_pyobj(serialized, objdata); + Py_DECREF(objdata); + if (res != 0) { + IGNORE_FAILURE("could not serialize object: raw str failed"); + _rawstring_free(serialized); + } else if (serialized->size == 0) { + _rawstring_free(serialized); + } else { + osn->serialized = serialized; + } + } +} + +static PyObject * +_objsnapshot_resolve_serialized(_objsnapshot *osn) +{ + assert(osn->serialized != NULL); + + // XXX Use marshal? + PyObject *pickle = PyImport_ImportModule("pickle"); + if (pickle == NULL) { + return NULL; + } + PyObject *objdata = _rawstring_as_pybytes(osn->serialized); + if (objdata == NULL) { + return NULL; + } else { + PyObject *obj = PyObject_CallMethod(pickle, "loads", "O", objdata); + Py_DECREF(objdata); + return obj; + } +} + +static PyObject * +_objsnapshot_resolve_naive(_objsnapshot *osn, PyObject *arg) +{ + if (_rawstring_is_clear(&osn->clsname)) { + // We can't proceed without at least the class name. 
+ PyErr_SetString(PyExc_ValueError, "missing class name"); + return NULL; + } + + if (osn->modname.data != NULL) { + return _pyobj_create(osn->modname.data, osn->clsname.data, arg); + } else { + PyObject *obj = _pyobj_create("builtins", osn->clsname.data, arg); + if (obj == NULL) { + PyErr_Clear(); + obj = _pyobj_create("__main__", osn->clsname.data, arg); + } + return obj; + } +} + +static PyObject * +_objsnapshot_resolve(_objsnapshot *osn) +{ + if (osn->serialized != NULL) { + PyObject *obj = _objsnapshot_resolve_serialized(osn); + if (obj != NULL) { + return obj; + } + IGNORE_FAILURE("could not de-serialize object"); + } + + // Fall back to naive resolution. + return _objsnapshot_resolve_naive(osn, NULL); +} + + +/* exception utils **********************************************************/ + +// _pyexc_create is inspired by _PyErr_SetObject(). + +static PyObject * +_pyexc_create(PyObject *exctype, const char *msg, PyObject *tb) +{ + assert(exctype != NULL && PyExceptionClass_Check(exctype)); + + PyObject *curtype = NULL, *curexc = NULL, *curtb = NULL; + PyErr_Fetch(&curtype, &curexc, &curtb); + + // Create the object. + PyObject *exc = NULL; + if (msg != NULL) { + PyObject *msgobj = PyUnicode_FromString(msg); + if (msgobj == NULL) { + IGNORE_FAILURE("could not deserialize propagated error message"); + } + exc = _PyObject_CallOneArg(exctype, msgobj); + Py_XDECREF(msgobj); + } else { + exc = _PyObject_CallNoArg(exctype); + } + if (exc == NULL) { + return NULL; + } + + // Set the traceback, if any. + if (tb == NULL) { + tb = curtb; + } + if (tb != NULL) { + // This does *not* steal a reference! 
+ PyException_SetTraceback(exc, tb); + } + + PyErr_Restore(curtype, curexc, curtb); + + return exc; +} + +/* traceback snapshots */ + +typedef struct _tbsnapshot { + _rawstring tbs_funcname; + _rawstring tbs_filename; + int tbs_lineno; + struct _tbsnapshot *tbs_next; +} _tbsnapshot; + +static void +_tbsnapshot_init(_tbsnapshot *tbs) +{ + _rawstring_init(&tbs->tbs_funcname); + _rawstring_init(&tbs->tbs_filename); + tbs->tbs_lineno = -1; + tbs->tbs_next = NULL; +} + +static _tbsnapshot * +_tbsnapshot_new(void) +{ + _tbsnapshot *tbs = PyMem_NEW(_tbsnapshot, 1); + if (tbs == NULL) { + PyErr_NoMemory(); + return NULL; + } + _tbsnapshot_init(tbs); + return tbs; +} + +static void _tbsnapshot_free(_tbsnapshot *); // forward + +static void +_tbsnapshot_clear(_tbsnapshot *tbs) +{ + _rawstring_clear(&tbs->tbs_funcname); + _rawstring_clear(&tbs->tbs_filename); + tbs->tbs_lineno = -1; + if (tbs->tbs_next != NULL) { + _tbsnapshot_free(tbs->tbs_next); + tbs->tbs_next = NULL; + } +} + +static void +_tbsnapshot_free(_tbsnapshot *tbs) +{ + _tbsnapshot_clear(tbs); + PyMem_Free(tbs); +} + +static int +_tbsnapshot_is_clear(_tbsnapshot *tbs) +{ + return tbs->tbs_lineno == -1 && tbs->tbs_next == NULL + && _rawstring_is_clear(&tbs->tbs_funcname) + && _rawstring_is_clear(&tbs->tbs_filename); +} + +static int +_tbsnapshot_from_pytb(_tbsnapshot *tbs, PyTracebackObject *pytb) +{ + assert(_tbsnapshot_is_clear(tbs)); + assert(pytb != NULL); + + PyCodeObject *pycode = pytb->tb_frame->f_code; + const char *funcname = PyUnicode_AsUTF8(pycode->co_name); + if (_rawstring_strcpy(&tbs->tbs_funcname, funcname, 0) != 0) { + goto error; + } + const char *filename = PyUnicode_AsUTF8(pycode->co_filename); + if (_rawstring_strcpy(&tbs->tbs_filename, filename, 0) != 0) { + goto error; + } + tbs->tbs_lineno = pytb->tb_lineno; + + return 0; + +error: + _tbsnapshot_clear(tbs); + return -1; +} + +static int +_tbsnapshot_extract(_tbsnapshot *tbs, PyTracebackObject *pytb) +{ + assert(_tbsnapshot_is_clear(tbs)); + 
assert(pytb != NULL); + + _tbsnapshot *next = NULL; + while (pytb->tb_next != NULL) { + _tbsnapshot *_next = _tbsnapshot_new(); + if (_next == NULL) { + goto error; + } + if (_tbsnapshot_from_pytb(_next, pytb) != 0) { + goto error; + } + if (next != NULL) { + _next->tbs_next = next; + } + next = _next; + pytb = pytb->tb_next; + } + if (_tbsnapshot_from_pytb(tbs, pytb) != 0) { + goto error; + } + tbs->tbs_next = next; + + return 0; + +error: + _tbsnapshot_clear(tbs); + return -1; +} + +static PyObject * +_tbsnapshot_resolve(_tbsnapshot *tbs) +{ + assert(!PyErr_Occurred()); + // At this point there should be no traceback set yet. + + while (tbs != NULL) { + const char *funcname = tbs->tbs_funcname.data; + const char *filename = tbs->tbs_filename.data; + _PyTraceback_Add(funcname ? funcname : "", + filename ? filename : "", + tbs->tbs_lineno); + tbs = tbs->tbs_next; + } + + PyObject *exctype = NULL, *excval = NULL, *tb = NULL; + PyErr_Fetch(&exctype, &excval, &tb); + // Leave it cleared. + return tb; +} + +/* exception snapshots */ + +typedef struct _excsnapshot { + _objsnapshot es_object; + _rawstring *es_msg; + struct _excsnapshot *es_cause; + struct _excsnapshot *es_context; + char es_suppress_context; + struct _tbsnapshot *es_traceback; +} _excsnapshot; + +static void +_excsnapshot_init(_excsnapshot *es) +{ + _objsnapshot_init(&es->es_object); + es->es_msg = NULL; + es->es_cause = NULL; + es->es_context = NULL; + es->es_suppress_context = 0; + es->es_traceback = NULL; +} + +static _excsnapshot * +_excsnapshot_new(void) { + _excsnapshot *es = PyMem_NEW(_excsnapshot, 1); + if (es == NULL) { + PyErr_NoMemory(); + return NULL; + } + _excsnapshot_init(es); + return es; +} + +static void _excsnapshot_free(_excsnapshot *); // forward + +static void +_excsnapshot_clear(_excsnapshot *es) +{ + _objsnapshot_clear(&es->es_object); + if (es->es_msg != NULL) { + _rawstring_free(es->es_msg); + es->es_msg = NULL; + } + if (es->es_cause != NULL) { + 
_excsnapshot_free(es->es_cause); + es->es_cause = NULL; + } + if (es->es_context != NULL) { + _excsnapshot_free(es->es_context); + es->es_context = NULL; + } + es->es_suppress_context = 0; + if (es->es_traceback != NULL) { + _tbsnapshot_free(es->es_traceback); + es->es_traceback = NULL; + } +} + +static void +_excsnapshot_free(_excsnapshot *es) +{ + _excsnapshot_clear(es); + PyMem_Free(es); +} + +static int +_excsnapshot_is_clear(_excsnapshot *es) +{ + return es->es_suppress_context == 0 + && es->es_cause == NULL + && es->es_context == NULL + && es->es_traceback == NULL + && es->es_msg == NULL + && _objsnapshot_is_clear(&es->es_object); +} + +static PyObject * +_excsnapshot_get_exc_naive(_excsnapshot *es) +{ + _rawstring buf; + const char *msg = NULL; + if (es->es_msg != NULL) { + msg = es->es_msg->data; + } else { + _objsnapshot_summarize(&es->es_object, &buf, NULL); + if (buf.size > 0) { + msg = buf.data; + } + } + + PyObject *exc = NULL; + // XXX Use _objsnapshot_resolve_naive()? + const char *modname = es->es_object.modname.size > 0 + ? es->es_object.modname.data + : NULL; + PyObject *exctype = _pyobj_get_class(modname, es->es_object.clsname.data); + if (exctype != NULL) { + exc = _pyexc_create(exctype, msg, NULL); + Py_DECREF(exctype); + if (exc != NULL) { + return exc; + } + PyErr_Clear(); + } else { + PyErr_Clear(); + } + exctype = PyExc_Exception; + return _pyexc_create(exctype, msg, NULL); +} + +static PyObject * +_excsnapshot_get_exc(_excsnapshot *es) +{ + assert(!_objsnapshot_is_clear(&es->es_object)); + + PyObject *exc = _objsnapshot_resolve(&es->es_object); + if (exc == NULL) { + // Fall back to resolving the object. + PyObject *curtype = NULL, *curexc = NULL, *curtb = NULL; + PyErr_Fetch(&curtype, &curexc, &curtb); + + exc = _excsnapshot_get_exc_naive(es); + if (exc == NULL) { + PyErr_Restore(curtype, curexc, curtb); + return NULL; + } + } + // People can do some weird stuff... + if (!PyExceptionInstance_Check(exc)) { + // We got a bogus "exception". 
+ Py_DECREF(exc); + PyErr_SetString(PyExc_TypeError, "expected exception"); + return NULL; + } + return exc; +} + +static void _excsnapshot_extract(_excsnapshot *, PyObject *); +static void +_excsnapshot_extract(_excsnapshot *es, PyObject *excobj) +{ + assert(_excsnapshot_is_clear(es)); + assert(PyExceptionInstance_Check(excobj)); + + _objsnapshot_extract(&es->es_object, excobj); + + es->es_msg = _objsnapshot_get_minimal_summary(&es->es_object, excobj); + if (es->es_msg == NULL) { + PyErr_Clear(); + } + + PyBaseExceptionObject *exc = (PyBaseExceptionObject *)excobj; + + if (exc->cause != NULL && exc->cause != Py_None) { + es->es_cause = _excsnapshot_new(); + _excsnapshot_extract(es->es_cause, exc->cause); + } + + if (exc->context != NULL && exc->context != Py_None) { + es->es_context = _excsnapshot_new(); + _excsnapshot_extract(es->es_context, exc->context); + } + + es->es_suppress_context = exc->suppress_context; + + PyObject *tb = PyException_GetTraceback(excobj); + if (PyErr_Occurred()) { + IGNORE_FAILURE("could not get traceback"); + } else if (tb == Py_None) { + Py_DECREF(tb); + tb = NULL; + } + if (tb != NULL) { + es->es_traceback = _tbsnapshot_new(); + if (_tbsnapshot_extract(es->es_traceback, + (PyTracebackObject *)tb) != 0) { + IGNORE_FAILURE("could not extract __traceback__"); + } + } +} + +static PyObject * +_excsnapshot_resolve(_excsnapshot *es) +{ + PyObject *exc = _excsnapshot_get_exc(es); + if (exc == NULL) { + return NULL; + } + + if (es->es_traceback != NULL) { + PyObject *tb = _tbsnapshot_resolve(es->es_traceback); + if (tb == NULL) { + // The snapshot is still somewhat useful without this. + IGNORE_FAILURE("could not deserialize traceback"); + } else { + // This does not steal references. + PyException_SetTraceback(exc, tb); + Py_DECREF(tb); + } + } + // NULL means "not set". + + if (es->es_context != NULL) { + PyObject *context = _excsnapshot_resolve(es->es_context); + if (context == NULL) { + // The snapshot is still useful without this. 
+ IGNORE_FAILURE("could not deserialize __context__"); + } else { + // This steals references but we have one to give. + PyException_SetContext(exc, context); + } + } + // NULL means "not set". + + if (es->es_cause != NULL) { + PyObject *cause = _excsnapshot_resolve(es->es_cause); + if (cause == NULL) { + // The snapshot is still useful without this. + IGNORE_FAILURE("could not deserialize __cause__"); + } else { + // This steals references, but we have one to give. + PyException_SetCause(exc, cause); + } + } + // NULL means "not set". + + ((PyBaseExceptionObject *)exc)->suppress_context = es->es_suppress_context; + + return exc; } /* data-sharing-specific code ***********************************************/ +/* shared "object" */ + struct _sharednsitem { - char *name; + _rawstring name; _PyCrossInterpreterData data; }; @@ -44,8 +929,7 @@ static void _sharednsitem_clear(struct _sharednsitem *); // forward static int _sharednsitem_init(struct _sharednsitem *item, PyObject *key, PyObject *value) { - item->name = _copy_raw_string(key); - if (item->name == NULL) { + if (_rawstring_from_pyobj(&item->name, key) != 0) { return -1; } if (_PyObject_GetCrossInterpreterData(value, &item->data) != 0) { @@ -58,17 +942,14 @@ _sharednsitem_init(struct _sharednsitem *item, PyObject *key, PyObject *value) static void _sharednsitem_clear(struct _sharednsitem *item) { - if (item->name != NULL) { - PyMem_Free(item->name); - item->name = NULL; - } + _rawstring_clear(&item->name); _PyCrossInterpreterData_Release(&item->data); } static int _sharednsitem_apply(struct _sharednsitem *item, PyObject *ns) { - PyObject *name = PyUnicode_FromString(item->name); + PyObject *name = PyUnicode_FromString(item->name.data); if (name == NULL) { return -1; } @@ -159,121 +1040,119 @@ _sharedns_apply(_sharedns *shared, PyObject *ns) return 0; } +/* shared exception */ + // Ultimately we'd like to preserve enough information about the // exception and traceback that we could re-constitute (or at least // 
simulate, a la traceback.TracebackException), and even chain, a copy // of the exception in the calling interpreter. typedef struct _sharedexception { - char *name; - char *msg; + _excsnapshot snapshot; + _rawstring msg; } _sharedexception; +static void +_sharedexception_init(_sharedexception *she) +{ + _excsnapshot_init(&she->snapshot); + _rawstring_init(&she->msg); +} + static _sharedexception * _sharedexception_new(void) { - _sharedexception *err = PyMem_NEW(_sharedexception, 1); - if (err == NULL) { + _sharedexception *she = PyMem_NEW(_sharedexception, 1); + if (she == NULL) { PyErr_NoMemory(); return NULL; } - err->name = NULL; - err->msg = NULL; - return err; + _sharedexception_init(she); + return she; } static void -_sharedexception_clear(_sharedexception *exc) +_sharedexception_clear(_sharedexception *she) { - if (exc->name != NULL) { - PyMem_Free(exc->name); - } - if (exc->msg != NULL) { - PyMem_Free(exc->msg); - } + _excsnapshot_clear(&she->snapshot); + _rawstring_clear(&she->msg); } static void -_sharedexception_free(_sharedexception *exc) +_sharedexception_free(_sharedexception *she) { - _sharedexception_clear(exc); - PyMem_Free(exc); + _sharedexception_clear(she); + PyMem_Free(she); } -static _sharedexception * -_sharedexception_bind(PyObject *exctype, PyObject *exc, PyObject *tb) +static int +_sharedexception_is_clear(_sharedexception *she) { - assert(exctype != NULL); - char *failure = NULL; - - _sharedexception *err = _sharedexception_new(); - if (err == NULL) { - goto finally; - } + return 1 + && _excsnapshot_is_clear(&she->snapshot) + && _rawstring_is_clear(&she->msg); +} - PyObject *name = PyUnicode_FromFormat("%S", exctype); - if (name == NULL) { - failure = "unable to format exception type name"; - goto finally; - } - err->name = _copy_raw_string(name); - Py_DECREF(name); - if (err->name == NULL) { - if (PyErr_ExceptionMatches(PyExc_MemoryError)) { - failure = "out of memory copying exception type name"; - } else { - failure = "unable to encode 
and copy exception type name"; +static PyObject * +_sharedexception_get_cause(_sharedexception *sharedexc) +{ + // FYI, "cause" is already normalized. + PyObject *cause = _excsnapshot_resolve(&sharedexc->snapshot); + if (cause == NULL) { + if (PyErr_Occurred()) { + IGNORE_FAILURE("could not deserialize exc snapshot"); } - goto finally; + return NULL; } + // XXX Ensure "cause" has a traceback. + return cause; +} - if (exc != NULL) { - PyObject *msg = PyUnicode_FromFormat("%S", exc); - if (msg == NULL) { - failure = "unable to format exception message"; - goto finally; - } - err->msg = _copy_raw_string(msg); - Py_DECREF(msg); - if (err->msg == NULL) { - if (PyErr_ExceptionMatches(PyExc_MemoryError)) { - failure = "out of memory copying exception message"; - } else { - failure = "unable to encode and copy exception message"; - } - goto finally; - } - } +static void +_sharedexception_extract(_sharedexception *she, PyObject *exc) +{ + assert(_sharedexception_is_clear(she)); + assert(exc != NULL); -finally: - if (failure != NULL) { - PyErr_Clear(); - if (err->name != NULL) { - PyMem_Free(err->name); - err->name = NULL; + _excsnapshot_extract(&she->snapshot, exc); + + // Compose the message. 
+ const char *msg = NULL; + PyObject *msgobj = PyUnicode_FromFormat("%S", exc); + if (msgobj == NULL) { + IGNORE_FAILURE("unable to format exception message"); + } else { + msg = PyUnicode_AsUTF8(msgobj); + if (PyErr_Occurred()) { + PyErr_Clear(); } - err->msg = failure; } - return err; + _objsnapshot_summarize(&she->snapshot.es_object, &she->msg, msg); + Py_XDECREF(msgobj); } -static void -_sharedexception_apply(_sharedexception *exc, PyObject *wrapperclass) +static PyObject * +_sharedexception_resolve(_sharedexception *sharedexc, PyObject *wrapperclass) { - if (exc->name != NULL) { - if (exc->msg != NULL) { - PyErr_Format(wrapperclass, "%s: %s", exc->name, exc->msg); - } - else { - PyErr_SetString(wrapperclass, exc->name); - } - } - else if (exc->msg != NULL) { - PyErr_SetString(wrapperclass, exc->msg); - } - else { - PyErr_SetNone(wrapperclass); + assert(!PyErr_Occurred()); + + // Get the exception object (already normalized). + PyObject *exc = _pyexc_create(wrapperclass, sharedexc->msg.data, NULL); + assert(exc != NULL); + + // Set __cause__, is possible. + PyObject *cause = _sharedexception_get_cause(sharedexc); + if (cause != NULL) { + // Set __context__. + Py_INCREF(cause); // PyException_SetContext() steals a reference. + PyException_SetContext(exc, cause); + + // Set __cause__. + Py_INCREF(cause); // PyException_SetCause() steals a reference. + PyException_SetCause(exc, cause); } + + return exc; } @@ -1869,11 +2748,9 @@ _ensure_not_running(PyInterpreterState *interp) static int _run_script(PyInterpreterState *interp, const char *codestr, - _sharedns *shared, _sharedexception **exc) + _sharedns *shared, _sharedexception **pexc) { - PyObject *exctype = NULL; - PyObject *excval = NULL; - PyObject *tb = NULL; + assert(!PyErr_Occurred()); // ...in the called interpreter. 
PyObject *main_mod = _PyInterpreterState_GetMainModule(interp); if (main_mod == NULL) { @@ -1904,25 +2781,38 @@ _run_script(PyInterpreterState *interp, const char *codestr, Py_DECREF(result); // We throw away the result. } - *exc = NULL; + *pexc = NULL; return 0; + PyObject *exctype = NULL, *exc = NULL, *tb = NULL; error: - PyErr_Fetch(&exctype, &excval, &tb); + PyErr_Fetch(&exctype, &exc, &tb); - _sharedexception *sharedexc = _sharedexception_bind(exctype, excval, tb); - Py_XDECREF(exctype); - Py_XDECREF(excval); - Py_XDECREF(tb); - if (sharedexc == NULL) { - fprintf(stderr, "RunFailedError: script raised an uncaught exception"); - PyErr_Clear(); - sharedexc = NULL; + // First normalize the exception. + PyErr_NormalizeException(&exctype, &exc, &tb); + assert(PyExceptionInstance_Check(exc)); + if (tb != NULL) { + PyException_SetTraceback(exc, tb); } - else { + + // Behave as though the exception was caught in this thread. + PyErr_SetExcInfo(exctype, exc, tb); // Like entering "except" block. + + // Serialize the exception. + _sharedexception *sharedexc = _sharedexception_new(); + if (sharedexc == NULL) { + IGNORE_FAILURE("script raised an uncaught exception"); + } else { + _sharedexception_extract(sharedexc, exc); assert(!PyErr_Occurred()); } - *exc = sharedexc; + + // Clear the exception. + PyErr_SetExcInfo(NULL, NULL, NULL); // Like leaving "except" block. + PyErr_Clear(); // Do not re-raise. + + // "Return" the serialized exception. + *pexc = sharedexc; return -1; } @@ -1930,6 +2820,8 @@ static int _run_script_in_interpreter(PyInterpreterState *interp, const char *codestr, PyObject *shareables) { + assert(!PyErr_Occurred()); // ...in the calling interpreter. + if (_ensure_not_running(interp) < 0) { return -1; } @@ -1963,8 +2855,8 @@ _run_script_in_interpreter(PyInterpreterState *interp, const char *codestr, } // Run the script. 
- _sharedexception *exc = NULL; - int result = _run_script(interp, codestr, shared, &exc); + _sharedexception *sharedexc = NULL; + int result = _run_script(interp, codestr, shared, &sharedexc); // Switch back. if (save_tstate != NULL) { @@ -1973,9 +2865,14 @@ _run_script_in_interpreter(PyInterpreterState *interp, const char *codestr, #endif // Propagate any exception out to the caller. - if (exc != NULL) { - _sharedexception_apply(exc, RunFailedError); - _sharedexception_free(exc); + if (sharedexc != NULL) { + assert(!PyErr_Occurred()); + PyObject *exc = _sharedexception_resolve(sharedexc, RunFailedError); + // XXX This is not safe once interpreters no longer share allocators. + _sharedexception_free(sharedexc); + PyObject *exctype = (PyObject *)Py_TYPE(exc); + Py_INCREF(exctype); // PyErr_Restore() steals a reference. + PyErr_Restore(exctype, exc, PyException_GetTraceback(exc)); } else if (result != 0) { // We were unable to allocate a shared exception. From webhook-mailer at python.org Thu May 7 13:16:09 2020 From: webhook-mailer at python.org (Hai Shi) Date: Thu, 07 May 2020 17:16:09 -0000 Subject: [Python-checkins] bpo-38787: Update structures.rst docs (PEP 573) (GH-19980) Message-ID: https://github.com/python/cpython/commit/c068b53a0ca6ebf740d98e422569d2f705e54f93 commit: c068b53a0ca6ebf740d98e422569d2f705e54f93 branch: master author: Hai Shi committer: GitHub date: 2020-05-07T19:16:01+02:00 summary: bpo-38787: Update structures.rst docs (PEP 573) (GH-19980) files: M Doc/c-api/structures.rst M Modules/_testmultiphase.c diff --git a/Doc/c-api/structures.rst b/Doc/c-api/structures.rst index 72c94459295c4..ea97e1e715561 100644 --- a/Doc/c-api/structures.rst +++ b/Doc/c-api/structures.rst @@ -150,7 +150,7 @@ Implementing functions and methods The function signature is:: PyObject *PyCFunction(PyObject *self, - PyObject *const *args); + PyObject *args); .. 
c:type:: PyCFunctionWithKeywords @@ -159,7 +159,7 @@ Implementing functions and methods The function signature is:: PyObject *PyCFunctionWithKeywords(PyObject *self, - PyObject *const *args, + PyObject *args, PyObject *kwargs); diff --git a/Modules/_testmultiphase.c b/Modules/_testmultiphase.c index 3084fc12a5ef5..d69ae628fa7a4 100644 --- a/Modules/_testmultiphase.c +++ b/Modules/_testmultiphase.c @@ -820,16 +820,12 @@ static PyModuleDef_Slot meth_state_access_slots[] = { }; static PyModuleDef def_meth_state_access = { - PyModuleDef_HEAD_INIT, /* m_base */ - "_testmultiphase_meth_state_access", /* m_name */ - PyDoc_STR("Module testing access" - " to state from methods."), - sizeof(meth_state), /* m_size */ - NULL, /* m_methods */ - meth_state_access_slots, /* m_slots */ - 0, /* m_traverse */ - 0, /* m_clear */ - 0, /* m_free */ + PyModuleDef_HEAD_INIT, + .m_name = "_testmultiphase_meth_state_access", + .m_doc = PyDoc_STR("Module testing access" + " to state from methods."), + .m_size = sizeof(meth_state), + .m_slots = meth_state_access_slots, }; PyMODINIT_FUNC From webhook-mailer at python.org Thu May 7 16:42:22 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 07 May 2020 20:42:22 -0000 Subject: [Python-checkins] bpo-40548: Always run GitHub action, even on doc PRs (GH-19981) Message-ID: https://github.com/python/cpython/commit/4e363761fc02a89d53aba4382dc451293bd6f0ba commit: 4e363761fc02a89d53aba4382dc451293bd6f0ba branch: master author: Victor Stinner committer: GitHub date: 2020-05-07T22:42:14+02:00 summary: bpo-40548: Always run GitHub action, even on doc PRs (GH-19981) Always run GitHub action jobs, even on documentation-only pull requests. So it will be possible to make a GitHub action job, like the Windows (64-bit) job, mandatory. 
files: M .github/workflows/build.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 50d1561518bd8..6e6a6d2b789d3 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,26 +1,19 @@ name: Tests +# bpo-40548: "paths-ignore" is not used to skip documentation-only PRs, because +# it prevents to mark a job as mandatory. A PR cannot be merged if a job is +# mandatory but not scheduled because of "paths-ignore". on: push: branches: - master - 3.8 - 3.7 - paths-ignore: - - 'Doc/**' - - 'Misc/**' - - '**/*.md' - - '**/*.rst' pull_request: branches: - master - 3.8 - 3.7 - paths-ignore: - - 'Doc/**' - - 'Misc/**' - - '**/*.md' - - '**/*.rst' jobs: build_win32: From webhook-mailer at python.org Thu May 7 16:57:35 2020 From: webhook-mailer at python.org (Batuhan Taskaya) Date: Thu, 07 May 2020 20:57:35 -0000 Subject: [Python-checkins] bpo-40517: Implement syntax highlighting support for ASDL (GH-19967) Message-ID: https://github.com/python/cpython/commit/b7a78ca74ab539943ab11b5c4c9cfab7f5b7ff5a commit: b7a78ca74ab539943ab11b5c4c9cfab7f5b7ff5a branch: master author: Batuhan Taskaya committer: GitHub date: 2020-05-07T13:57:26-07:00 summary: bpo-40517: Implement syntax highlighting support for ASDL (GH-19967) files: A Doc/tools/extensions/asdl_highlight.py M Doc/conf.py M Doc/library/ast.rst diff --git a/Doc/conf.py b/Doc/conf.py index 32db34344a70a..12d74ea24ce4a 100644 --- a/Doc/conf.py +++ b/Doc/conf.py @@ -14,7 +14,8 @@ # --------------------- extensions = ['sphinx.ext.coverage', 'sphinx.ext.doctest', - 'pyspecific', 'c_annotations', 'escape4chm'] + 'pyspecific', 'c_annotations', 'escape4chm', + 'asdl_highlight'] doctest_global_setup = ''' diff --git a/Doc/library/ast.rst b/Doc/library/ast.rst index fc04114949c0c..6c6ad01b842c8 100644 --- a/Doc/library/ast.rst +++ b/Doc/library/ast.rst @@ -35,7 +35,7 @@ Abstract Grammar The abstract grammar is currently defined as follows: .. 
literalinclude:: ../../Parser/Python.asdl - :language: none + :language: asdl Node classes diff --git a/Doc/tools/extensions/asdl_highlight.py b/Doc/tools/extensions/asdl_highlight.py new file mode 100644 index 0000000000000..7d2ef011c1b76 --- /dev/null +++ b/Doc/tools/extensions/asdl_highlight.py @@ -0,0 +1,51 @@ +import os +import sys +sys.path.append(os.path.abspath("../Parser/")) + +from pygments.lexer import RegexLexer, bygroups, include, words +from pygments.token import (Comment, Generic, Keyword, Name, Operator, + Punctuation, Text) + +from asdl import builtin_types +from sphinx.highlighting import lexers + +class ASDLLexer(RegexLexer): + name = "ASDL" + aliases = ["asdl"] + filenames = ["*.asdl"] + _name = r"([^\W\d]\w*)" + _text_ws = r"(\s*)" + + tokens = { + "ws": [ + (r"\n", Text), + (r"\s+", Text), + (r"--.*?$", Comment.Singleline), + ], + "root": [ + include("ws"), + ( + r"(module)" + _text_ws + _name, + bygroups(Keyword, Text, Name.Tag), + ), + ( + r"(\w+)(\*\s|\?\s|\s)(\w+)", + bygroups(Name.Builtin.Pseudo, Operator, Name), + ), + (words(builtin_types), Name.Builtin), + (r"attributes", Name.Builtin), + ( + _name + _text_ws + "(=)", + bygroups(Name, Text, Operator), + ), + (_name, Name.Class), + (r"\|", Operator), + (r"{|}|\(|\)", Punctuation), + (r".", Text), + ], + } + + +def setup(app): + lexers["asdl"] = ASDLLexer() + return {'version': '1.0', 'parallel_read_safe': True} From webhook-mailer at python.org Thu May 7 22:38:48 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Fri, 08 May 2020 02:38:48 -0000 Subject: [Python-checkins] bpo-40555: Check for p->error_indicator in loop rules after the main loop is done (GH-19986) Message-ID: https://github.com/python/cpython/commit/db9163ceef31ba00ccb23226917f9c8e9142a0b8 commit: db9163ceef31ba00ccb23226917f9c8e9142a0b8 branch: master author: Pablo Galindo committer: GitHub date: 2020-05-08T03:38:44+01:00 summary: bpo-40555: Check for p->error_indicator in loop rules after the main loop is 
done (GH-19986) files: M Lib/test/test_eof.py M Parser/pegen/parse.c M Tools/peg_generator/pegen/c_generator.py diff --git a/Lib/test/test_eof.py b/Lib/test/test_eof.py index 9ef8eb1187486..bebad3106119e 100644 --- a/Lib/test/test_eof.py +++ b/Lib/test/test_eof.py @@ -26,6 +26,15 @@ def test_EOFS(self): else: raise support.TestFailed + def test_eof_with_line_continuation(self): + expect = "unexpected EOF while parsing (, line 1)" + try: + compile('"\\xhh" \\', '', 'exec', dont_inherit=True) + except SyntaxError as msg: + self.assertEqual(str(msg), expect) + else: + raise support.TestFailed + def test_line_continuation_EOF(self): """A continuation at the end of input must be an error; bpo2180.""" expect = 'unexpected EOF while parsing (, line 1)' diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index 3a08abbca581c..ae86841e8663b 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -11437,7 +11437,7 @@ _loop1_11_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -11876,7 +11876,7 @@ _loop1_22_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -12252,7 +12252,7 @@ _loop1_31_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -12911,7 +12911,7 @@ _loop1_47_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -13267,7 +13267,7 @@ _loop1_56_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -13369,7 +13369,7 @@ _loop1_58_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -13422,7 +13422,7 @@ _loop1_59_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { 
PyMem_Free(children); return NULL; } @@ -13475,7 +13475,7 @@ _loop1_60_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -13577,7 +13577,7 @@ _loop1_62_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -13679,7 +13679,7 @@ _loop1_64_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -13781,7 +13781,7 @@ _loop1_66_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -13834,7 +13834,7 @@ _loop1_67_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -14007,7 +14007,7 @@ _loop1_71_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -14145,7 +14145,7 @@ _loop1_74_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -14345,7 +14345,7 @@ _loop1_78_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -14447,7 +14447,7 @@ _loop1_80_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -14500,7 +14500,7 @@ _loop1_81_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -14553,7 +14553,7 @@ _loop1_82_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -14655,7 +14655,7 @@ _loop1_84_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -14757,7 +14757,7 @@ 
_loop1_86_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -14859,7 +14859,7 @@ _loop1_88_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -14912,7 +14912,7 @@ _loop1_89_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -14965,7 +14965,7 @@ _loop1_90_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -15018,7 +15018,7 @@ _loop1_91_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -15358,7 +15358,7 @@ _loop1_99_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -15567,7 +15567,7 @@ _loop1_104_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -17165,7 +17165,7 @@ _loop1_145_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } diff --git a/Tools/peg_generator/pegen/c_generator.py b/Tools/peg_generator/pegen/c_generator.py index 3bf6d9ed6a3ab..b7a9942c2fdd2 100644 --- a/Tools/peg_generator/pegen/c_generator.py +++ b/Tools/peg_generator/pegen/c_generator.py @@ -525,7 +525,7 @@ def _handle_loop_rule_body(self, node: Rule, rhs: Rhs) -> None: rulename=node.name if memoize else None, ) if is_repeat1: - self.print("if (n == 0) {") + self.print("if (n == 0 || p->error_indicator) {") with self.indent(): self.print("PyMem_Free(children);") self.print("return NULL;") From webhook-mailer at python.org Fri May 8 01:25:58 2020 From: webhook-mailer at python.org (Zackery Spytz) Date: Fri, 08 May 2020 05:25:58 -0000 Subject: [Python-checkins] bpo-40273: 
Reversible mappingproxy (FH-19513) Message-ID: https://github.com/python/cpython/commit/02fa0ea9c1073e4476c9bde3d7112f5dd964aa57 commit: 02fa0ea9c1073e4476c9bde3d7112f5dd964aa57 branch: master author: Zackery Spytz committer: GitHub date: 2020-05-07T22:25:50-07:00 summary: bpo-40273: Reversible mappingproxy (FH-19513) files: A Misc/NEWS.d/next/Library/2020-04-14-09-54-35.bpo-40273.IN73Ks.rst M Doc/library/types.rst M Lib/test/test_types.py M Objects/descrobject.c diff --git a/Doc/library/types.rst b/Doc/library/types.rst index 4cb91c1a90bcf..1d081e2c54868 100644 --- a/Doc/library/types.rst +++ b/Doc/library/types.rst @@ -329,6 +329,12 @@ Standard names are defined for the following types: Return a new view of the underlying mapping's values. + .. describe:: reversed(proxy) + + Return a reverse iterator over the keys of the underlying mapping. + + .. versionadded:: 3.9 + Additional Utility Classes and Functions ---------------------------------------- diff --git a/Lib/test/test_types.py b/Lib/test/test_types.py index f42238762ddcc..28ebfb6e603e3 100644 --- a/Lib/test/test_types.py +++ b/Lib/test/test_types.py @@ -627,6 +627,7 @@ def test_methods(self): '__iter__', '__len__', '__or__', + '__reversed__', '__ror__', 'copy', 'get', @@ -768,6 +769,14 @@ def test_iterators(self): self.assertEqual(set(view.values()), set(values)) self.assertEqual(set(view.items()), set(items)) + def test_reversed(self): + d = {'a': 1, 'b': 2, 'foo': 0, 'c': 3, 'd': 4} + mp = self.mappingproxy(d) + del d['foo'] + r = reversed(mp) + self.assertEqual(list(r), list('dcba')) + self.assertRaises(StopIteration, next, r) + def test_copy(self): original = {'key1': 27, 'key2': 51, 'key3': 93} view = self.mappingproxy(original) diff --git a/Misc/NEWS.d/next/Library/2020-04-14-09-54-35.bpo-40273.IN73Ks.rst b/Misc/NEWS.d/next/Library/2020-04-14-09-54-35.bpo-40273.IN73Ks.rst new file mode 100644 index 0000000000000..50f547f56c520 --- /dev/null +++ 
b/Misc/NEWS.d/next/Library/2020-04-14-09-54-35.bpo-40273.IN73Ks.rst @@ -0,0 +1 @@ +:class:`types.MappingProxyType` is now reversible. diff --git a/Objects/descrobject.c b/Objects/descrobject.c index c9754a11b89be..c29cf7a4c4464 100644 --- a/Objects/descrobject.c +++ b/Objects/descrobject.c @@ -1118,6 +1118,13 @@ mappingproxy_copy(mappingproxyobject *pp, PyObject *Py_UNUSED(ignored)) return _PyObject_CallMethodIdNoArgs(pp->mapping, &PyId_copy); } +static PyObject * +mappingproxy_reversed(mappingproxyobject *pp, PyObject *Py_UNUSED(ignored)) +{ + _Py_IDENTIFIER(__reversed__); + return _PyObject_CallMethodIdNoArgs(pp->mapping, &PyId___reversed__); +} + /* WARNING: mappingproxy methods must not give access to the underlying mapping */ @@ -1135,6 +1142,8 @@ static PyMethodDef mappingproxy_methods[] = { PyDoc_STR("D.copy() -> a shallow copy of D")}, {"__class_getitem__", (PyCFunction)Py_GenericAlias, METH_O|METH_CLASS, PyDoc_STR("See PEP 585")}, + {"__reversed__", (PyCFunction)mappingproxy_reversed, METH_NOARGS, + PyDoc_STR("D.__reversed__() -> reverse iterator")}, {0} }; From webhook-mailer at python.org Fri May 8 06:54:42 2020 From: webhook-mailer at python.org (Chris Jerdonek) Date: Fri, 08 May 2020 10:54:42 -0000 Subject: [Python-checkins] bpo-40559: Add Py_DECREF to _asynciomodule.c:task_step_impl() (GH-19990) Message-ID: https://github.com/python/cpython/commit/d2c349b190bcba21a4a38e6520a48ad97a9f1529 commit: d2c349b190bcba21a4a38e6520a48ad97a9f1529 branch: master author: Chris Jerdonek committer: GitHub date: 2020-05-08T03:54:38-07:00 summary: bpo-40559: Add Py_DECREF to _asynciomodule.c:task_step_impl() (GH-19990) This fixes a possible memory leak in the C implementation of asyncio.Task. 
files: A Misc/NEWS.d/next/Library/2020-05-05-08-12-51.bpo-40559.112wwa.rst M Modules/_asynciomodule.c diff --git a/Misc/NEWS.d/next/Library/2020-05-05-08-12-51.bpo-40559.112wwa.rst b/Misc/NEWS.d/next/Library/2020-05-05-08-12-51.bpo-40559.112wwa.rst new file mode 100644 index 0000000000000..15846351f25bb --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-05-08-12-51.bpo-40559.112wwa.rst @@ -0,0 +1 @@ +Fix possible memory leak in the C implementation of :class:`asyncio.Task`. \ No newline at end of file diff --git a/Modules/_asynciomodule.c b/Modules/_asynciomodule.c index a03a63119bab3..cc211a8895a8e 100644 --- a/Modules/_asynciomodule.c +++ b/Modules/_asynciomodule.c @@ -2638,6 +2638,10 @@ task_step_impl(TaskObj *task, PyObject *exc) coro = task->task_coro; if (coro == NULL) { PyErr_SetString(PyExc_RuntimeError, "uninitialized Task object"); + if (clear_exc) { + /* We created 'exc' during this call */ + Py_DECREF(exc); + } return NULL; } From webhook-mailer at python.org Fri May 8 07:28:43 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 08 May 2020 11:28:43 -0000 Subject: [Python-checkins] [3.8] bpo-40559: Add Py_DECREF to _asynciomodule.c:task_step_impl() (GH-19990) Message-ID: https://github.com/python/cpython/commit/0e4a5e96f011989736bde824ab817146bd7c9cfc commit: 0e4a5e96f011989736bde824ab817146bd7c9cfc branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-08T04:28:38-07:00 summary: [3.8] bpo-40559: Add Py_DECREF to _asynciomodule.c:task_step_impl() (GH-19990) This fixes a possible memory leak in the C implementation of asyncio.Task. 
(cherry picked from commit d2c349b190bcba21a4a38e6520a48ad97a9f1529) Co-authored-by: Chris Jerdonek files: A Misc/NEWS.d/next/Library/2020-05-05-08-12-51.bpo-40559.112wwa.rst M Modules/_asynciomodule.c diff --git a/Misc/NEWS.d/next/Library/2020-05-05-08-12-51.bpo-40559.112wwa.rst b/Misc/NEWS.d/next/Library/2020-05-05-08-12-51.bpo-40559.112wwa.rst new file mode 100644 index 0000000000000..15846351f25bb --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-05-08-12-51.bpo-40559.112wwa.rst @@ -0,0 +1 @@ +Fix possible memory leak in the C implementation of :class:`asyncio.Task`. \ No newline at end of file diff --git a/Modules/_asynciomodule.c b/Modules/_asynciomodule.c index 5ba2bc4975709..8e1cd4f52a540 100644 --- a/Modules/_asynciomodule.c +++ b/Modules/_asynciomodule.c @@ -2626,6 +2626,10 @@ task_step_impl(TaskObj *task, PyObject *exc) coro = task->task_coro; if (coro == NULL) { PyErr_SetString(PyExc_RuntimeError, "uninitialized Task object"); + if (clear_exc) { + /* We created 'exc' during this call */ + Py_DECREF(exc); + } return NULL; } From webhook-mailer at python.org Fri May 8 07:30:35 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 08 May 2020 11:30:35 -0000 Subject: [Python-checkins] [3.7] bpo-40559: Add Py_DECREF to _asynciomodule.c:task_step_impl() (GH-19990) Message-ID: https://github.com/python/cpython/commit/25014289887cb521c1041df4773c839d3fbf784e commit: 25014289887cb521c1041df4773c839d3fbf784e branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-08T04:30:30-07:00 summary: [3.7] bpo-40559: Add Py_DECREF to _asynciomodule.c:task_step_impl() (GH-19990) This fixes a possible memory leak in the C implementation of asyncio.Task. 
(cherry picked from commit d2c349b190bcba21a4a38e6520a48ad97a9f1529) Co-authored-by: Chris Jerdonek files: A Misc/NEWS.d/next/Library/2020-05-05-08-12-51.bpo-40559.112wwa.rst M Modules/_asynciomodule.c diff --git a/Misc/NEWS.d/next/Library/2020-05-05-08-12-51.bpo-40559.112wwa.rst b/Misc/NEWS.d/next/Library/2020-05-05-08-12-51.bpo-40559.112wwa.rst new file mode 100644 index 0000000000000..15846351f25bb --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-05-08-12-51.bpo-40559.112wwa.rst @@ -0,0 +1 @@ +Fix possible memory leak in the C implementation of :class:`asyncio.Task`. \ No newline at end of file diff --git a/Modules/_asynciomodule.c b/Modules/_asynciomodule.c index 441506bc82a14..7880de327f637 100644 --- a/Modules/_asynciomodule.c +++ b/Modules/_asynciomodule.c @@ -2567,6 +2567,10 @@ task_step_impl(TaskObj *task, PyObject *exc) coro = task->task_coro; if (coro == NULL) { PyErr_SetString(PyExc_RuntimeError, "uninitialized Task object"); + if (clear_exc) { + /* We created 'exc' during this call */ + Py_DECREF(exc); + } return NULL; } From webhook-mailer at python.org Fri May 8 07:40:01 2020 From: webhook-mailer at python.org (Ned Batchelder) Date: Fri, 08 May 2020 11:40:01 -0000 Subject: [Python-checkins] Make the first dataclass example more useful (GH-19994) Message-ID: https://github.com/python/cpython/commit/2effef7453986bf43a6d921cd471a8bc0722c36a commit: 2effef7453986bf43a6d921cd471a8bc0722c36a branch: master author: Ned Batchelder committer: GitHub date: 2020-05-08T04:39:57-07:00 summary: Make the first dataclass example more useful (GH-19994) files: M Doc/library/dataclasses.rst diff --git a/Doc/library/dataclasses.rst b/Doc/library/dataclasses.rst index c125a1130a960..fe63d20671dd7 100644 --- a/Doc/library/dataclasses.rst +++ b/Doc/library/dataclasses.rst @@ -19,6 +19,8 @@ in :pep:`557`. The member variables to use in these generated methods are defined using :pep:`526` type annotations. 
For example this code:: + from dataclasses import dataclass + @dataclass class InventoryItem: '''Class for keeping track of an item in inventory.''' From webhook-mailer at python.org Fri May 8 07:52:20 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 08 May 2020 11:52:20 -0000 Subject: [Python-checkins] Make the first dataclass example more useful (GH-19994) (GH-19997) Message-ID: https://github.com/python/cpython/commit/546f643487f5116b795983c951141e0de78a0049 commit: 546f643487f5116b795983c951141e0de78a0049 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-08T07:52:10-04:00 summary: Make the first dataclass example more useful (GH-19994) (GH-19997) (cherry picked from commit 2effef7453986bf43a6d921cd471a8bc0722c36a) Co-authored-by: Ned Batchelder Co-authored-by: Ned Batchelder files: M Doc/library/dataclasses.rst diff --git a/Doc/library/dataclasses.rst b/Doc/library/dataclasses.rst index 37258d4ebc738..10edcac7e8a9b 100644 --- a/Doc/library/dataclasses.rst +++ b/Doc/library/dataclasses.rst @@ -19,6 +19,8 @@ in :pep:`557`. The member variables to use in these generated methods are defined using :pep:`526` type annotations. 
For example this code:: + from dataclasses import dataclass + @dataclass class InventoryItem: '''Class for keeping track of an item in inventory.''' From webhook-mailer at python.org Fri May 8 07:52:53 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 08 May 2020 11:52:53 -0000 Subject: [Python-checkins] Make the first dataclass example more useful (GH-19994) (GH-19998) Message-ID: https://github.com/python/cpython/commit/5d555fa4ef4985709c0eeb49f182e6b9a8de747c commit: 5d555fa4ef4985709c0eeb49f182e6b9a8de747c branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-08T07:52:48-04:00 summary: Make the first dataclass example more useful (GH-19994) (GH-19998) (cherry picked from commit 2effef7453986bf43a6d921cd471a8bc0722c36a) Co-authored-by: Ned Batchelder Co-authored-by: Ned Batchelder files: M Doc/library/dataclasses.rst diff --git a/Doc/library/dataclasses.rst b/Doc/library/dataclasses.rst index 37258d4ebc738..10edcac7e8a9b 100644 --- a/Doc/library/dataclasses.rst +++ b/Doc/library/dataclasses.rst @@ -19,6 +19,8 @@ in :pep:`557`. The member variables to use in these generated methods are defined using :pep:`526` type annotations. 
For example this code:: + from dataclasses import dataclass + @dataclass class InventoryItem: '''Class for keeping track of an item in inventory.''' From webhook-mailer at python.org Fri May 8 10:53:19 2020 From: webhook-mailer at python.org (Raymond Hettinger) Date: Fri, 08 May 2020 14:53:19 -0000 Subject: [Python-checkins] bpo-40541: Add optional *counts* parameter to random.sample() (GH-19970) Message-ID: https://github.com/python/cpython/commit/81a5fc38e81b424869f4710f48e9371dfa2d3b77 commit: 81a5fc38e81b424869f4710f48e9371dfa2d3b77 branch: master author: Raymond Hettinger committer: GitHub date: 2020-05-08T07:53:15-07:00 summary: bpo-40541: Add optional *counts* parameter to random.sample() (GH-19970) files: A Misc/NEWS.d/next/Library/2020-05-06-15-36-47.bpo-40541.LlYghL.rst M Doc/library/random.rst M Lib/random.py M Lib/test/test_random.py diff --git a/Doc/library/random.rst b/Doc/library/random.rst index f37bc2a111d95..90366f499cae6 100644 --- a/Doc/library/random.rst +++ b/Doc/library/random.rst @@ -217,7 +217,7 @@ Functions for sequences The optional parameter *random*. -.. function:: sample(population, k) +.. function:: sample(population, k, *, counts=None) Return a *k* length list of unique elements chosen from the population sequence or set. Used for random sampling without replacement. @@ -231,6 +231,11 @@ Functions for sequences Members of the population need not be :term:`hashable` or unique. If the population contains repeats, then each occurrence is a possible selection in the sample. + Repeated elements can be specified one at a time or with the optional + keyword-only *counts* parameter. For example, ``sample(['red', 'blue'], + counts=[4, 2], k=5)`` is equivalent to ``sample(['red', 'red', 'red', 'red', + 'blue', 'blue'], k=5)``. + To choose a sample from a range of integers, use a :func:`range` object as an argument. This is especially fast and space efficient for sampling from a large population: ``sample(range(10000000), k=60)``. 
@@ -238,6 +243,9 @@ Functions for sequences If the sample size is larger than the population size, a :exc:`ValueError` is raised. + .. versionchanged:: 3.9 + Added the *counts* parameter. + .. deprecated:: 3.9 In the future, the *population* must be a sequence. Instances of :class:`set` are no longer supported. The set must first be converted @@ -420,12 +428,11 @@ Simulations:: >>> choices(['red', 'black', 'green'], [18, 18, 2], k=6) ['red', 'green', 'black', 'black', 'red', 'black'] - >>> # Deal 20 cards without replacement from a deck of 52 playing cards - >>> # and determine the proportion of cards with a ten-value - >>> # (a ten, jack, queen, or king). - >>> deck = collections.Counter(tens=16, low_cards=36) - >>> seen = sample(list(deck.elements()), k=20) - >>> seen.count('tens') / 20 + >>> # Deal 20 cards without replacement from a deck + >>> # of 52 playing cards, and determine the proportion of cards + >>> # with a ten-value: ten, jack, queen, or king. + >>> dealt = sample(['tens', 'low cards'], counts=[16, 36], k=20) + >>> dealt.count('tens') / 20 0.15 >>> # Estimate the probability of getting 5 or more heads from 7 spins diff --git a/Lib/random.py b/Lib/random.py index f2c4f39fb6079..75f70d5d699ed 100644 --- a/Lib/random.py +++ b/Lib/random.py @@ -331,7 +331,7 @@ def shuffle(self, x, random=None): j = _int(random() * (i+1)) x[i], x[j] = x[j], x[i] - def sample(self, population, k): + def sample(self, population, k, *, counts=None): """Chooses k unique random elements from a population sequence or set. Returns a new list containing elements from the population while @@ -344,9 +344,21 @@ def sample(self, population, k): population contains repeats, then each occurrence is a possible selection in the sample. - To choose a sample in a range of integers, use range as an argument. 
- This is especially fast and space efficient for sampling from a - large population: sample(range(10000000), 60) + Repeated elements can be specified one at a time or with the optional + counts parameter. For example: + + sample(['red', 'blue'], counts=[4, 2], k=5) + + is equivalent to: + + sample(['red', 'red', 'red', 'red', 'blue', 'blue'], k=5) + + To choose a sample from a range of integers, use range() for the + population argument. This is especially fast and space efficient + for sampling from a large population: + + sample(range(10000000), 60) + """ # Sampling without replacement entails tracking either potential @@ -379,8 +391,20 @@ def sample(self, population, k): population = tuple(population) if not isinstance(population, _Sequence): raise TypeError("Population must be a sequence. For dicts or sets, use sorted(d).") - randbelow = self._randbelow n = len(population) + if counts is not None: + cum_counts = list(_accumulate(counts)) + if len(cum_counts) != n: + raise ValueError('The number of counts does not match the population') + total = cum_counts.pop() + if not isinstance(total, int): + raise TypeError('Counts must be integers') + if total <= 0: + raise ValueError('Total of counts must be greater than zero') + selections = sample(range(total), k=k) + bisect = _bisect + return [population[bisect(cum_counts, s)] for s in selections] + randbelow = self._randbelow if not 0 <= k <= n: raise ValueError("Sample larger than population or is negative") result = [None] * k diff --git a/Lib/test/test_random.py b/Lib/test/test_random.py index bb95ca0884a51..a3710f4aa48a6 100644 --- a/Lib/test/test_random.py +++ b/Lib/test/test_random.py @@ -9,7 +9,7 @@ from math import log, exp, pi, fsum, sin, factorial from test import support from fractions import Fraction - +from collections import Counter class TestBasicOps: # Superclass with tests common to all generators. 
@@ -161,6 +161,77 @@ def test_sample_on_sets(self): population = {10, 20, 30, 40, 50, 60, 70} self.gen.sample(population, k=5) + def test_sample_with_counts(self): + sample = self.gen.sample + + # General case + colors = ['red', 'green', 'blue', 'orange', 'black', 'brown', 'amber'] + counts = [500, 200, 20, 10, 5, 0, 1 ] + k = 700 + summary = Counter(sample(colors, counts=counts, k=k)) + self.assertEqual(sum(summary.values()), k) + for color, weight in zip(colors, counts): + self.assertLessEqual(summary[color], weight) + self.assertNotIn('brown', summary) + + # Case that exhausts the population + k = sum(counts) + summary = Counter(sample(colors, counts=counts, k=k)) + self.assertEqual(sum(summary.values()), k) + for color, weight in zip(colors, counts): + self.assertLessEqual(summary[color], weight) + self.assertNotIn('brown', summary) + + # Case with population size of 1 + summary = Counter(sample(['x'], counts=[10], k=8)) + self.assertEqual(summary, Counter(x=8)) + + # Case with all counts equal. + nc = len(colors) + summary = Counter(sample(colors, counts=[10]*nc, k=10*nc)) + self.assertEqual(summary, Counter(10*colors)) + + # Test error handling + with self.assertRaises(TypeError): + sample(['red', 'green', 'blue'], counts=10, k=10) # counts not iterable + with self.assertRaises(ValueError): + sample(['red', 'green', 'blue'], counts=[-3, -7, -8], k=2) # counts are negative + with self.assertRaises(ValueError): + sample(['red', 'green', 'blue'], counts=[0, 0, 0], k=2) # counts are zero + with self.assertRaises(ValueError): + sample(['red', 'green'], counts=[10, 10], k=21) # population too small + with self.assertRaises(ValueError): + sample(['red', 'green', 'blue'], counts=[1, 2], k=2) # too few counts + with self.assertRaises(ValueError): + sample(['red', 'green', 'blue'], counts=[1, 2, 3, 4], k=2) # too many counts + + def test_sample_counts_equivalence(self): + # Test the documented strong equivalence to a sample with repeated elements. 
+ # We run this test on random.Random() which makes deterministic selections + # for a given seed value. + sample = random.sample + seed = random.seed + + colors = ['red', 'green', 'blue', 'orange', 'black', 'amber'] + counts = [500, 200, 20, 10, 5, 1 ] + k = 700 + seed(8675309) + s1 = sample(colors, counts=counts, k=k) + seed(8675309) + expanded = [color for (color, count) in zip(colors, counts) for i in range(count)] + self.assertEqual(len(expanded), sum(counts)) + s2 = sample(expanded, k=k) + self.assertEqual(s1, s2) + + pop = 'abcdefghi' + counts = [10, 9, 8, 7, 6, 5, 4, 3, 2] + seed(8675309) + s1 = ''.join(sample(pop, counts=counts, k=30)) + expanded = ''.join([letter for (letter, count) in zip(pop, counts) for i in range(count)]) + seed(8675309) + s2 = ''.join(sample(expanded, k=30)) + self.assertEqual(s1, s2) + def test_choices(self): choices = self.gen.choices data = ['red', 'green', 'blue', 'yellow'] diff --git a/Misc/NEWS.d/next/Library/2020-05-06-15-36-47.bpo-40541.LlYghL.rst b/Misc/NEWS.d/next/Library/2020-05-06-15-36-47.bpo-40541.LlYghL.rst new file mode 100644 index 0000000000000..a2e694ac1ad08 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-06-15-36-47.bpo-40541.LlYghL.rst @@ -0,0 +1 @@ +Added an optional *counts* parameter to random.sample(). From webhook-mailer at python.org Fri May 8 16:58:36 2020 From: webhook-mailer at python.org (Joannah Nanjekye) Date: Fri, 08 May 2020 20:58:36 -0000 Subject: [Python-checkins] bpo-40502: Initialize n->n_col_offset (GH-19988) Message-ID: https://github.com/python/cpython/commit/d10091aa171250c67a5079abfe26b8b3964ea39a commit: d10091aa171250c67a5079abfe26b8b3964ea39a branch: master author: Joannah Nanjekye <33177550+nanjekyejoannah at users.noreply.github.com> committer: GitHub date: 2020-05-08T17:58:28-03:00 summary: bpo-40502: Initialize n->n_col_offset (GH-19988) * initialize n->n_col_offset * ?? Added by blurb_it. 
* Move initialization Co-authored-by: nanjekyejoannah Co-authored-by: blurb-it[bot] <43283697+blurb-it[bot]@users.noreply.github.com> files: A Misc/NEWS.d/next/Core and Builtins/2020-05-08-03-25-26.bpo-40502.e-VCyL.rst M Parser/node.c diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-08-03-25-26.bpo-40502.e-VCyL.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-08-03-25-26.bpo-40502.e-VCyL.rst new file mode 100644 index 0000000000000..b0ea60234634c --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-08-03-25-26.bpo-40502.e-VCyL.rst @@ -0,0 +1,2 @@ +Initialize ``n->n_col_offset``. +(Patch by Joannah Nanjekye) \ No newline at end of file diff --git a/Parser/node.c b/Parser/node.c index f1b70e0f6815b..8789e01e9b848 100644 --- a/Parser/node.c +++ b/Parser/node.c @@ -14,6 +14,7 @@ PyNode_New(int type) n->n_str = NULL; n->n_lineno = 0; n->n_end_lineno = 0; + n->n_col_offset = 0; n->n_end_col_offset = -1; n->n_nchildren = 0; n->n_child = NULL; From webhook-mailer at python.org Fri May 8 19:20:30 2020 From: webhook-mailer at python.org (Jason R. Coombs) Date: Fri, 08 May 2020 23:20:30 -0000 Subject: [Python-checkins] bpo-39791: Add files() to importlib.resources (GH-19722) Message-ID: https://github.com/python/cpython/commit/7f7e706d78ab968a1221c6179dfdba714860bd12 commit: 7f7e706d78ab968a1221c6179dfdba714860bd12 branch: master author: Jason R. Coombs committer: GitHub date: 2020-05-08T19:20:26-04:00 summary: bpo-39791: Add files() to importlib.resources (GH-19722) * bpo-39791: Update importlib.resources to support files() API (importlib_resources 1.5). * ?? Added by blurb_it. * Add some documentation about the new objects added. 
Co-authored-by: blurb-it[bot] <43283697+blurb-it[bot]@users.noreply.github.com> files: A Lib/importlib/_common.py A Lib/test/test_importlib/test_files.py A Misc/NEWS.d/next/Library/2020-04-27-00-51-40.bpo-39791.wv8Dxn.rst M Doc/library/importlib.rst M Lib/importlib/abc.py M Lib/importlib/resources.py M Lib/test/test_importlib/test_path.py diff --git a/Doc/library/importlib.rst b/Doc/library/importlib.rst index a612b1e1455a0..99bfeacbbc740 100644 --- a/Doc/library/importlib.rst +++ b/Doc/library/importlib.rst @@ -480,6 +480,8 @@ ABC hierarchy:: .. class:: ResourceReader + *Superseded by TraversableReader* + An :term:`abstract base class` to provide the ability to read *resources*. @@ -795,6 +797,28 @@ ABC hierarchy:: itself does not end in ``__init__``. +.. class:: Traversable + + An object with a subset of pathlib.Path methods suitable for + traversing directories and opening files. + + .. versionadded:: 3.9 + + +.. class:: TraversableReader + + An abstract base class for resource readers capable of serving + the ``files`` interface. Subclasses ResourceReader and provides + concrete implementations of the ResourceReader's abstract + methods. Therefore, any loader supplying TraversableReader + also supplies ResourceReader. + + Loaders that wish to support resource reading are expected to + implement this interface. + + .. versionadded:: 3.9 + + :mod:`importlib.resources` -- Resources --------------------------------------- @@ -853,6 +877,19 @@ The following types are defined. The following functions are available. + +.. function:: files(package) + + Returns an :class:`importlib.resources.abc.Traversable` object + representing the resource container for the package (think directory) + and its resources (think files). A Traversable may contain other + containers (think subdirectories). + + *package* is either a name or a module object which conforms to the + ``Package`` requirements. + + .. versionadded:: 3.9 + .. 
function:: open_binary(package, resource) Open for binary reading the *resource* within *package*. diff --git a/Lib/importlib/_common.py b/Lib/importlib/_common.py new file mode 100644 index 0000000000000..ba7cbac3c9bfd --- /dev/null +++ b/Lib/importlib/_common.py @@ -0,0 +1,72 @@ +import os +import pathlib +import zipfile +import tempfile +import functools +import contextlib + + +def from_package(package): + """ + Return a Traversable object for the given package. + + """ + spec = package.__spec__ + return from_traversable_resources(spec) or fallback_resources(spec) + + +def from_traversable_resources(spec): + """ + If the spec.loader implements TraversableResources, + directly or implicitly, it will have a ``files()`` method. + """ + with contextlib.suppress(AttributeError): + return spec.loader.files() + + +def fallback_resources(spec): + package_directory = pathlib.Path(spec.origin).parent + try: + archive_path = spec.loader.archive + rel_path = package_directory.relative_to(archive_path) + return zipfile.Path(archive_path, str(rel_path) + '/') + except Exception: + pass + return package_directory + + + at contextlib.contextmanager +def _tempfile(reader, suffix=''): + # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try' + # blocks due to the need to close the temporary file to work on Windows + # properly. + fd, raw_path = tempfile.mkstemp(suffix=suffix) + try: + os.write(fd, reader()) + os.close(fd) + yield pathlib.Path(raw_path) + finally: + try: + os.remove(raw_path) + except FileNotFoundError: + pass + + + at functools.singledispatch + at contextlib.contextmanager +def as_file(path): + """ + Given a Traversable object, return that object as a + path on the local file system in a context manager. + """ + with _tempfile(path.read_bytes, suffix=path.name) as local: + yield local + + + at as_file.register(pathlib.Path) + at contextlib.contextmanager +def _(path): + """ + Degenerate behavior for pathlib.Path objects. 
+ """ + yield path diff --git a/Lib/importlib/abc.py b/Lib/importlib/abc.py index b1b5ccce4bd35..b8a9bb1a21ef7 100644 --- a/Lib/importlib/abc.py +++ b/Lib/importlib/abc.py @@ -14,6 +14,7 @@ _frozen_importlib_external = _bootstrap_external import abc import warnings +from typing import Protocol, runtime_checkable def _register(abstract_cls, *classes): @@ -386,3 +387,88 @@ def contents(self): _register(ResourceReader, machinery.SourceFileLoader) + + + at runtime_checkable +class Traversable(Protocol): + """ + An object with a subset of pathlib.Path methods suitable for + traversing directories and opening files. + """ + + @abc.abstractmethod + def iterdir(self): + """ + Yield Traversable objects in self + """ + + @abc.abstractmethod + def read_bytes(self): + """ + Read contents of self as bytes + """ + + @abc.abstractmethod + def read_text(self, encoding=None): + """ + Read contents of self as bytes + """ + + @abc.abstractmethod + def is_dir(self): + """ + Return True if self is a dir + """ + + @abc.abstractmethod + def is_file(self): + """ + Return True if self is a file + """ + + @abc.abstractmethod + def joinpath(self, child): + """ + Return Traversable child in self + """ + + @abc.abstractmethod + def __truediv__(self, child): + """ + Return Traversable child in self + """ + + @abc.abstractmethod + def open(self, mode='r', *args, **kwargs): + """ + mode may be 'r' or 'rb' to open as text or binary. Return a handle + suitable for reading (same as pathlib.Path.open). + + When opening as text, accepts encoding parameters such as those + accepted by io.TextIOWrapper. + """ + + @abc.abstractproperty + def name(self): + # type: () -> str + """ + The base name of this object without any parent references. 
+ """ + + +class TraversableResources(ResourceReader): + @abc.abstractmethod + def files(self): + """Return a Traversable object for the loaded package.""" + + def open_resource(self, resource): + return self.files().joinpath(resource).open('rb') + + def resource_path(self, resource): + raise FileNotFoundError(resource) + + def is_resource(self, path): + return self.files().joinpath(path).isfile() + + def contents(self): + return (item.name for item in self.files().iterdir()) diff --git a/Lib/importlib/resources.py b/Lib/importlib/resources.py index f51886557466c..b803a01c91d65 100644 --- a/Lib/importlib/resources.py +++ b/Lib/importlib/resources.py @@ -1,14 +1,15 @@ import os -import tempfile from . import abc as resources_abc +from . import _common +from ._common import as_file from contextlib import contextmanager, suppress from importlib import import_module from importlib.abc import ResourceLoader from io import BytesIO, TextIOWrapper from pathlib import Path from types import ModuleType -from typing import Iterable, Iterator, Optional, Union # noqa: F401 +from typing import ContextManager, Iterable, Optional, Union from typing import cast from typing.io import BinaryIO, TextIO @@ -16,7 +17,9 @@ __all__ = [ 'Package', 'Resource', + 'as_file', 'contents', + 'files', 'is_resource', 'open_binary', 'open_text', @@ -30,24 +33,23 @@ Resource = Union[str, os.PathLike] +def _resolve(name) -> ModuleType: + """If name is a string, resolve to a module.""" + if hasattr(name, '__spec__'): + return name + return import_module(name) + + def _get_package(package) -> ModuleType: """Take a package name or module object and return the module. - If a name, the module is imported. If the passed or imported module + If a name, the module is imported. If the resolved module object is not a package, raise an exception. 
""" - if hasattr(package, '__spec__'): - if package.__spec__.submodule_search_locations is None: - raise TypeError('{!r} is not a package'.format( - package.__spec__.name)) - else: - return package - else: - module = import_module(package) - if module.__spec__.submodule_search_locations is None: - raise TypeError('{!r} is not a package'.format(package)) - else: - return module + module = _resolve(package) + if module.__spec__.submodule_search_locations is None: + raise TypeError('{!r} is not a package'.format(package)) + return module def _normalize_path(path) -> str: @@ -58,8 +60,7 @@ def _normalize_path(path) -> str: parent, file_name = os.path.split(path) if parent: raise ValueError('{!r} must be only a file name'.format(path)) - else: - return file_name + return file_name def _get_resource_reader( @@ -88,8 +89,8 @@ def open_binary(package: Package, resource: Resource) -> BinaryIO: reader = _get_resource_reader(package) if reader is not None: return reader.open_resource(resource) - _check_location(package) - absolute_package_path = os.path.abspath(package.__spec__.origin) + absolute_package_path = os.path.abspath( + package.__spec__.origin or 'non-existent file') package_path = os.path.dirname(absolute_package_path) full_path = os.path.join(package_path, resource) try: @@ -108,8 +109,7 @@ def open_binary(package: Package, resource: Resource) -> BinaryIO: message = '{!r} resource not found in {!r}'.format( resource, package_name) raise FileNotFoundError(message) - else: - return BytesIO(data) + return BytesIO(data) def open_text(package: Package, @@ -117,39 +117,12 @@ def open_text(package: Package, encoding: str = 'utf-8', errors: str = 'strict') -> TextIO: """Return a file-like object opened for text reading of the resource.""" - resource = _normalize_path(resource) - package = _get_package(package) - reader = _get_resource_reader(package) - if reader is not None: - return TextIOWrapper(reader.open_resource(resource), encoding, errors) - 
_check_location(package) - absolute_package_path = os.path.abspath(package.__spec__.origin) - package_path = os.path.dirname(absolute_package_path) - full_path = os.path.join(package_path, resource) - try: - return open(full_path, mode='r', encoding=encoding, errors=errors) - except OSError: - # Just assume the loader is a resource loader; all the relevant - # importlib.machinery loaders are and an AttributeError for - # get_data() will make it clear what is needed from the loader. - loader = cast(ResourceLoader, package.__spec__.loader) - data = None - if hasattr(package.__spec__.loader, 'get_data'): - with suppress(OSError): - data = loader.get_data(full_path) - if data is None: - package_name = package.__spec__.name - message = '{!r} resource not found in {!r}'.format( - resource, package_name) - raise FileNotFoundError(message) - else: - return TextIOWrapper(BytesIO(data), encoding, errors) + return TextIOWrapper( + open_binary(package, resource), encoding=encoding, errors=errors) def read_binary(package: Package, resource: Resource) -> bytes: """Return the binary contents of the resource.""" - resource = _normalize_path(resource) - package = _get_package(package) with open_binary(package, resource) as fp: return fp.read() @@ -163,14 +136,20 @@ def read_text(package: Package, The decoding-related arguments have the same semantics as those of bytes.decode(). """ - resource = _normalize_path(resource) - package = _get_package(package) with open_text(package, resource, encoding, errors) as fp: return fp.read() - at contextmanager -def path(package: Package, resource: Resource) -> Iterator[Path]: +def files(package: Package) -> resources_abc.Traversable: + """ + Get a Traversable resource from a package + """ + return _common.from_package(_get_package(package)) + + +def path( + package: Package, resource: Resource, + ) -> 'ContextManager[Path]': """A context manager providing a file path object to the resource. 
If the resource does not already exist on its own on the file system, @@ -179,39 +158,23 @@ def path(package: Package, resource: Resource) -> Iterator[Path]: raised if the file was deleted prior to the context manager exiting). """ - resource = _normalize_path(resource) - package = _get_package(package) - reader = _get_resource_reader(package) - if reader is not None: - try: - yield Path(reader.resource_path(resource)) - return - except FileNotFoundError: - pass - else: - _check_location(package) - # Fall-through for both the lack of resource_path() *and* if - # resource_path() raises FileNotFoundError. - package_directory = Path(package.__spec__.origin).parent - file_path = package_directory / resource - if file_path.exists(): - yield file_path - else: - with open_binary(package, resource) as fp: - data = fp.read() - # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try' - # blocks due to the need to close the temporary file to work on - # Windows properly. - fd, raw_path = tempfile.mkstemp() - try: - os.write(fd, data) - os.close(fd) - yield Path(raw_path) - finally: - try: - os.remove(raw_path) - except FileNotFoundError: - pass + reader = _get_resource_reader(_get_package(package)) + return ( + _path_from_reader(reader, resource) + if reader else + _common.as_file(files(package).joinpath(_normalize_path(resource))) + ) + + + at contextmanager +def _path_from_reader(reader, resource): + norm_resource = _normalize_path(resource) + with suppress(FileNotFoundError): + yield Path(reader.resource_path(norm_resource)) + return + opener_reader = reader.open_resource(norm_resource) + with _common._tempfile(opener_reader.read, suffix=norm_resource) as res: + yield res def is_resource(package: Package, name: str) -> bool: @@ -224,17 +187,10 @@ def is_resource(package: Package, name: str) -> bool: reader = _get_resource_reader(package) if reader is not None: return reader.is_resource(name) - try: - package_contents = set(contents(package)) - except 
(NotADirectoryError, FileNotFoundError): - return False + package_contents = set(contents(package)) if name not in package_contents: return False - # Just because the given file_name lives as an entry in the package's - # contents doesn't necessarily mean it's a resource. Directories are not - # resources, so let's try to find out if it's a directory or not. - path = Path(package.__spec__.origin).parent / name - return path.is_file() + return (_common.from_package(package) / name).is_file() def contents(package: Package) -> Iterable[str]: @@ -249,10 +205,11 @@ def contents(package: Package) -> Iterable[str]: if reader is not None: return reader.contents() # Is the package a namespace package? By definition, namespace packages - # cannot have resources. We could use _check_location() and catch the - # exception, but that's extra work, so just inline the check. - elif package.__spec__.origin is None or not package.__spec__.has_location: + # cannot have resources. + namespace = ( + package.__spec__.origin is None or + package.__spec__.origin == 'namespace' + ) + if namespace or not package.__spec__.has_location: return () - else: - package_directory = Path(package.__spec__.origin).parent - return os.listdir(package_directory) + return list(item.name for item in _common.from_package(package).iterdir()) diff --git a/Lib/test/test_importlib/test_files.py b/Lib/test/test_importlib/test_files.py new file mode 100644 index 0000000000000..fa7af82bf0c28 --- /dev/null +++ b/Lib/test/test_importlib/test_files.py @@ -0,0 +1,39 @@ +import typing +import unittest + +from importlib import resources +from importlib.abc import Traversable +from . import data01 +from . 
import util + + +class FilesTests: + def test_read_bytes(self): + files = resources.files(self.data) + actual = files.joinpath('utf-8.file').read_bytes() + assert actual == b'Hello, UTF-8 world!\n' + + def test_read_text(self): + files = resources.files(self.data) + actual = files.joinpath('utf-8.file').read_text() + assert actual == 'Hello, UTF-8 world!\n' + + @unittest.skipUnless( + hasattr(typing, 'runtime_checkable'), + "Only suitable when typing supports runtime_checkable", + ) + def test_traversable(self): + assert isinstance(resources.files(self.data), Traversable) + + +class OpenDiskTests(FilesTests, unittest.TestCase): + def setUp(self): + self.data = data01 + + +class OpenZipTests(FilesTests, util.ZipSetup, unittest.TestCase): + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/Lib/test/test_importlib/test_path.py b/Lib/test/test_importlib/test_path.py index 2d3dcda7ed2e7..c4e7285411322 100644 --- a/Lib/test/test_importlib/test_path.py +++ b/Lib/test/test_importlib/test_path.py @@ -17,6 +17,7 @@ def test_reading(self): # Test also implicitly verifies the returned object is a pathlib.Path # instance. with resources.path(self.data, 'utf-8.file') as path: + self.assertTrue(path.name.endswith("utf-8.file"), repr(path)) # pathlib.Path.read_text() was introduced in Python 3.5. with path.open('r', encoding='utf-8') as file: text = file.read() diff --git a/Misc/NEWS.d/next/Library/2020-04-27-00-51-40.bpo-39791.wv8Dxn.rst b/Misc/NEWS.d/next/Library/2020-04-27-00-51-40.bpo-39791.wv8Dxn.rst new file mode 100644 index 0000000000000..237bcf7f99b0f --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-04-27-00-51-40.bpo-39791.wv8Dxn.rst @@ -0,0 +1 @@ +Added ``files()`` function to importlib.resources with support for subdirectories in package data, matching backport in importlib_resources 1.5. 
\ No newline at end of file From webhook-mailer at python.org Sat May 9 04:32:04 2020 From: webhook-mailer at python.org (Dong-hee Na) Date: Sat, 09 May 2020 08:32:04 -0000 Subject: [Python-checkins] bpo-40566: Apply PEP 573 to abc module (GH-20005) Message-ID: https://github.com/python/cpython/commit/77c614624b6bf2145bef69830d0f499d8b55ec0c commit: 77c614624b6bf2145bef69830d0f499d8b55ec0c branch: master author: Dong-hee Na committer: GitHub date: 2020-05-09T01:31:40-07:00 summary: bpo-40566: Apply PEP 573 to abc module (GH-20005) files: A Misc/NEWS.d/next/Core and Builtins/2020-05-09-01-39-16.bpo-40566.wlcjW_.rst M Modules/_abc.c diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-09-01-39-16.bpo-40566.wlcjW_.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-09-01-39-16.bpo-40566.wlcjW_.rst new file mode 100644 index 0000000000000..92a5e3ce63217 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-09-01-39-16.bpo-40566.wlcjW_.rst @@ -0,0 +1 @@ +Apply :pep:`573` to :mod:`abc`. diff --git a/Modules/_abc.c b/Modules/_abc.c index 7c040ef80ba3d..434bc454175b5 100644 --- a/Modules/_abc.c +++ b/Modules/_abc.c @@ -21,16 +21,9 @@ _Py_IDENTIFIER(__subclasshook__); typedef struct { PyTypeObject *_abc_data_type; + unsigned long long abc_invalidation_counter; } _abcmodule_state; -/* A global counter that is incremented each time a class is - registered as a virtual subclass of anything. It forces the - negative cache to be cleared before its next use. - Note: this counter is private. Use `abc.get_cache_token()` for - external code. */ -// FIXME: PEP 573: Move abc_invalidation_counter into _abcmodule_state. 
-static unsigned long long abc_invalidation_counter = 0; - static inline _abcmodule_state* get_abc_state(PyObject *module) { @@ -81,14 +74,21 @@ static PyObject * abc_data_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { _abc_data *self = (_abc_data *) type->tp_alloc(type, 0); + _abcmodule_state *state = NULL; if (self == NULL) { return NULL; } + state = PyType_GetModuleState(type); + if (state == NULL) { + Py_DECREF(self); + return NULL; + } + self->_abc_registry = NULL; self->_abc_cache = NULL; self->_abc_negative_cache = NULL; - self->_abc_negative_cache_version = abc_invalidation_counter; + self->_abc_negative_cache_version = state->abc_invalidation_counter; return (PyObject *) self; } @@ -495,7 +495,7 @@ _abc__abc_register_impl(PyObject *module, PyObject *self, PyObject *subclass) Py_DECREF(impl); /* Invalidate negative cache */ - abc_invalidation_counter++; + get_abc_state(module)->abc_invalidation_counter++; Py_INCREF(subclass); return subclass; @@ -540,7 +540,7 @@ _abc__abc_instancecheck_impl(PyObject *module, PyObject *self, } subtype = (PyObject *)Py_TYPE(instance); if (subtype == subclass) { - if (impl->_abc_negative_cache_version == abc_invalidation_counter) { + if (impl->_abc_negative_cache_version == get_abc_state(module)->abc_invalidation_counter) { incache = _in_weak_set(impl->_abc_negative_cache, subclass); if (incache < 0) { goto end; @@ -612,6 +612,7 @@ _abc__abc_subclasscheck_impl(PyObject *module, PyObject *self, } PyObject *ok, *subclasses = NULL, *result = NULL; + _abcmodule_state *state = NULL; Py_ssize_t pos; int incache; _abc_data *impl = _get_impl(module, self); @@ -629,15 +630,16 @@ _abc__abc_subclasscheck_impl(PyObject *module, PyObject *self, goto end; } + state = get_abc_state(module); /* 2. Check negative cache; may have to invalidate. */ - if (impl->_abc_negative_cache_version < abc_invalidation_counter) { + if (impl->_abc_negative_cache_version < state->abc_invalidation_counter) { /* Invalidate the negative cache. 
*/ if (impl->_abc_negative_cache != NULL && PySet_Clear(impl->_abc_negative_cache) < 0) { goto end; } - impl->_abc_negative_cache_version = abc_invalidation_counter; + impl->_abc_negative_cache_version = state->abc_invalidation_counter; } else { incache = _in_weak_set(impl->_abc_negative_cache, subclass); @@ -830,7 +832,8 @@ static PyObject * _abc_get_cache_token_impl(PyObject *module) /*[clinic end generated code: output=c7d87841e033dacc input=70413d1c423ad9f9]*/ { - return PyLong_FromUnsignedLongLong(abc_invalidation_counter); + _abcmodule_state *state = get_abc_state(module); + return PyLong_FromUnsignedLongLong(state->abc_invalidation_counter); } static struct PyMethodDef _abcmodule_methods[] = { @@ -849,7 +852,8 @@ static int _abcmodule_exec(PyObject *module) { _abcmodule_state *state = get_abc_state(module); - state->_abc_data_type = (PyTypeObject *)PyType_FromSpec(&_abc_data_type_spec); + state->abc_invalidation_counter = 0; + state->_abc_data_type = (PyTypeObject *)PyType_FromModuleAndSpec(module, &_abc_data_type_spec, NULL); if (state->_abc_data_type == NULL) { return -1; } From webhook-mailer at python.org Sat May 9 10:12:46 2020 From: webhook-mailer at python.org (Jason R. Coombs) Date: Sat, 09 May 2020 14:12:46 -0000 Subject: [Python-checkins] bpo-40570: Improve compatibility of uname_result with late-bound .platform (#20015) Message-ID: https://github.com/python/cpython/commit/2c3d508c5fabe40dac848fb9ae558069f0576879 commit: 2c3d508c5fabe40dac848fb9ae558069f0576879 branch: master author: Jason R. Coombs committer: GitHub date: 2020-05-09T10:12:41-04:00 summary: bpo-40570: Improve compatibility of uname_result with late-bound .platform (#20015) * bpo-40570: Improve compatibility of uname_result with late-bound .platform. * Add test capturing ability to cast uname to a tuple. 
files: M Lib/platform.py M Lib/test/test_platform.py diff --git a/Lib/platform.py b/Lib/platform.py index 049c2c6ef25a1..e9f50ab622d31 100755 --- a/Lib/platform.py +++ b/Lib/platform.py @@ -798,9 +798,10 @@ def __iter__(self): ) def __getitem__(self, key): - if key == 5: - return self.processor - return super().__getitem__(key) + return tuple(iter(self))[key] + + def __len__(self): + return len(tuple(iter(self))) _uname_cache = None diff --git a/Lib/test/test_platform.py b/Lib/test/test_platform.py index 7664b38a720a7..a5c35dff79b8b 100644 --- a/Lib/test/test_platform.py +++ b/Lib/test/test_platform.py @@ -154,11 +154,26 @@ def test_uname(self): res = platform.uname() self.assertTrue(any(res)) self.assertEqual(res[0], res.system) + self.assertEqual(res[-6], res.system) self.assertEqual(res[1], res.node) + self.assertEqual(res[-5], res.node) self.assertEqual(res[2], res.release) + self.assertEqual(res[-4], res.release) self.assertEqual(res[3], res.version) + self.assertEqual(res[-3], res.version) self.assertEqual(res[4], res.machine) + self.assertEqual(res[-2], res.machine) self.assertEqual(res[5], res.processor) + self.assertEqual(res[-1], res.processor) + self.assertEqual(len(res), 6) + + def test_uname_cast_to_tuple(self): + res = platform.uname() + expected = ( + res.system, res.node, res.release, res.version, res.machine, + res.processor, + ) + self.assertEqual(tuple(res), expected) @unittest.skipIf(sys.platform in ['win32', 'OpenVMS'], "uname -p not used") def test_uname_processor(self): From webhook-mailer at python.org Sun May 10 00:35:00 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Sun, 10 May 2020 04:35:00 -0000 Subject: [Python-checkins] bpo-40334: Avoid collisions between parser variables and grammar variables (GH-19987) Message-ID: https://github.com/python/cpython/commit/ac7a92cc0a821699df48bc2e30a02c25d6338f78 commit: ac7a92cc0a821699df48bc2e30a02c25d6338f78 branch: master author: Pablo Galindo committer: GitHub date: 
2020-05-09T21:34:50-07:00 summary: bpo-40334: Avoid collisions between parser variables and grammar variables (GH-19987) This is for the C generator: - Disallow rule and variable names starting with `_` - Rename most local variable names generated by the parser to start with `_` Exceptions: - Renaming `p` to `_p` will be a separate PR - There are still some names that might clash, e.g. - anything starting with `Py` - C reserved words (`if` etc.) - Macros like `EXTRA` and `CHECK` files: M Lib/test/test_peg_generator/test_pegen.py M Parser/pegen/parse.c M Parser/pegen/pegen.h M Tools/peg_generator/pegen/c_generator.py M Tools/peg_generator/pegen/parser_generator.py diff --git a/Lib/test/test_peg_generator/test_pegen.py b/Lib/test/test_peg_generator/test_pegen.py index 0a2a6d4ae1601..30e1b675643b2 100644 --- a/Lib/test/test_peg_generator/test_pegen.py +++ b/Lib/test/test_peg_generator/test_pegen.py @@ -540,6 +540,33 @@ def test_missing_start(self) -> None: with self.assertRaises(GrammarError): parser_class = make_parser(grammar) + def test_invalid_rule_name(self) -> None: + grammar = """ + start: _a b + _a: 'a' + b: 'b' + """ + with self.assertRaisesRegex(GrammarError, "cannot start with underscore: '_a'"): + parser_class = make_parser(grammar) + + def test_invalid_variable_name(self) -> None: + grammar = """ + start: a b + a: _x='a' + b: 'b' + """ + with self.assertRaisesRegex(GrammarError, "cannot start with underscore: '_x'"): + parser_class = make_parser(grammar) + + def test_invalid_variable_name_in_temporal_rule(self) -> None: + grammar = """ + start: a b + a: (_x='a' | 'b') | 'c' + b: 'b' + """ + with self.assertRaisesRegex(GrammarError, "cannot start with underscore: '_x'"): + parser_class = make_parser(grammar) + class TestGrammarVisitor: class Visitor(GrammarVisitor): diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index ae86841e8663b..27feda73d99e1 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -674,8 +674,8 @@ file_rule(Parser *p) 
if (p->error_indicator) { return NULL; } - mod_ty res = NULL; - int mark = p->mark; + mod_ty _res = NULL; + int _mark = p->mark; { // statements? $ void *a; Token * endmarker_var; @@ -685,18 +685,18 @@ file_rule(Parser *p) (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' ) { - res = _PyPegen_make_module ( p , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_module ( p , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // interactive: statement_newline @@ -706,26 +706,26 @@ interactive_rule(Parser *p) if (p->error_indicator) { return NULL; } - mod_ty res = NULL; - int mark = p->mark; + mod_ty _res = NULL; + int _mark = p->mark; { // statement_newline asdl_seq* a; if ( (a = statement_newline_rule(p)) // statement_newline ) { - res = Interactive ( a , p -> arena ); - if (res == NULL && PyErr_Occurred()) { + _res = Interactive ( a , p -> arena ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // eval: expressions NEWLINE* $ @@ -735,8 +735,8 @@ eval_rule(Parser *p) if (p->error_indicator) { return NULL; } - mod_ty res = NULL; - int mark = p->mark; + mod_ty _res = NULL; + int _mark = p->mark; { // expressions NEWLINE* $ asdl_seq * _loop0_1_var; expr_ty a; @@ -749,18 +749,18 @@ eval_rule(Parser *p) (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' ) { - res = Expression ( a , p -> arena ); - if (res == NULL && PyErr_Occurred()) { + _res = Expression ( a , p -> arena ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // func_type: '(' type_expressions? 
')' '->' expression NEWLINE* $ @@ -770,24 +770,24 @@ func_type_rule(Parser *p) if (p->error_indicator) { return NULL; } - mod_ty res = NULL; - int mark = p->mark; + mod_ty _res = NULL; + int _mark = p->mark; { // '(' type_expressions? ')' '->' expression NEWLINE* $ + Token * _literal; + Token * _literal_1; + Token * _literal_2; asdl_seq * _loop0_2_var; void *a; expr_ty b; Token * endmarker_var; - Token * literal; - Token * literal_1; - Token * literal_2; if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (a = type_expressions_rule(p), 1) // type_expressions? && - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && - (literal_2 = _PyPegen_expect_token(p, 51)) // token='->' + (_literal_2 = _PyPegen_expect_token(p, 51)) // token='->' && (b = expression_rule(p)) // expression && @@ -796,18 +796,18 @@ func_type_rule(Parser *p) (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' ) { - res = FunctionType ( a , b , p -> arena ); - if (res == NULL && PyErr_Occurred()) { + _res = FunctionType ( a , b , p -> arena ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // fstring: star_expressions @@ -817,22 +817,22 @@ fstring_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; { // star_expressions expr_ty star_expressions_var; if ( (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - res = star_expressions_var; + _res = star_expressions_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // type_expressions: @@ -849,151 +849,151 @@ type_expressions_rule(Parser *p) if (p->error_indicator) { 
return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // ','.expression+ ',' '*' expression ',' '**' expression + Token * _literal; + Token * _literal_1; + Token * _literal_2; + Token * _literal_3; asdl_seq * a; expr_ty b; expr_ty c; - Token * literal; - Token * literal_1; - Token * literal_2; - Token * literal_3; if ( (a = _gather_3_rule(p)) // ','.expression+ && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (literal_1 = _PyPegen_expect_token(p, 16)) // token='*' + (_literal_1 = _PyPegen_expect_token(p, 16)) // token='*' && (b = expression_rule(p)) // expression && - (literal_2 = _PyPegen_expect_token(p, 12)) // token=',' + (_literal_2 = _PyPegen_expect_token(p, 12)) // token=',' && - (literal_3 = _PyPegen_expect_token(p, 35)) // token='**' + (_literal_3 = _PyPegen_expect_token(p, 35)) // token='**' && (c = expression_rule(p)) // expression ) { - res = _PyPegen_seq_append_to_end ( p , CHECK ( _PyPegen_seq_append_to_end ( p , a , b ) ) , c ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_seq_append_to_end ( p , CHECK ( _PyPegen_seq_append_to_end ( p , a , b ) ) , c ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ','.expression+ ',' '*' expression + Token * _literal; + Token * _literal_1; asdl_seq * a; expr_ty b; - Token * literal; - Token * literal_1; if ( (a = _gather_5_rule(p)) // ','.expression+ && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (literal_1 = _PyPegen_expect_token(p, 16)) // token='*' + (_literal_1 = _PyPegen_expect_token(p, 16)) // token='*' && (b = expression_rule(p)) // expression ) { - res = _PyPegen_seq_append_to_end ( p , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_seq_append_to_end ( p , a , b ); + if (_res == NULL 
&& PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ','.expression+ ',' '**' expression + Token * _literal; + Token * _literal_1; asdl_seq * a; expr_ty b; - Token * literal; - Token * literal_1; if ( (a = _gather_7_rule(p)) // ','.expression+ && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (literal_1 = _PyPegen_expect_token(p, 35)) // token='**' + (_literal_1 = _PyPegen_expect_token(p, 35)) // token='**' && (b = expression_rule(p)) // expression ) { - res = _PyPegen_seq_append_to_end ( p , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_seq_append_to_end ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '*' expression ',' '**' expression + Token * _literal; + Token * _literal_1; + Token * _literal_2; expr_ty a; expr_ty b; - Token * literal; - Token * literal_1; - Token * literal_2; if ( - (literal = _PyPegen_expect_token(p, 16)) // token='*' + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (a = expression_rule(p)) // expression && - (literal_1 = _PyPegen_expect_token(p, 12)) // token=',' + (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' && - (literal_2 = _PyPegen_expect_token(p, 35)) // token='**' + (_literal_2 = _PyPegen_expect_token(p, 35)) // token='**' && (b = expression_rule(p)) // expression ) { - res = _PyPegen_seq_append_to_end ( p , CHECK ( _PyPegen_singleton_seq ( p , a ) ) , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_seq_append_to_end ( p , CHECK ( _PyPegen_singleton_seq ( p , a ) ) , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '*' expression + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) // token='*' + 
(_literal = _PyPegen_expect_token(p, 16)) // token='*' && (a = expression_rule(p)) // expression ) { - res = _PyPegen_singleton_seq ( p , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_singleton_seq ( p , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '**' expression + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 35)) // token='**' + (_literal = _PyPegen_expect_token(p, 35)) // token='**' && (a = expression_rule(p)) // expression ) { - res = _PyPegen_singleton_seq ( p , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_singleton_seq ( p , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ','.expression+ asdl_seq * _gather_9_var; @@ -1001,14 +1001,14 @@ type_expressions_rule(Parser *p) (_gather_9_var = _gather_9_rule(p)) // ','.expression+ ) { - res = _gather_9_var; + _res = _gather_9_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // statements: statement+ @@ -1018,26 +1018,26 @@ statements_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // statement+ asdl_seq * a; if ( (a = _loop1_11_rule(p)) // statement+ ) { - res = _PyPegen_seq_flatten ( p , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_seq_flatten ( p , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // statement: compound_stmt | simple_stmt @@ -1047,22 +1047,22 @@ statement_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + 
int _mark = p->mark; { // compound_stmt stmt_ty a; if ( (a = compound_stmt_rule(p)) // compound_stmt ) { - res = _PyPegen_singleton_seq ( p , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_singleton_seq ( p , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // simple_stmt asdl_seq* simple_stmt_var; @@ -1070,14 +1070,14 @@ statement_rule(Parser *p) (simple_stmt_var = simple_stmt_rule(p)) // simple_stmt ) { - res = simple_stmt_var; + _res = simple_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // statement_newline: compound_stmt NEWLINE | simple_stmt | NEWLINE | $ @@ -1087,16 +1087,16 @@ statement_newline_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // compound_stmt NEWLINE stmt_ty a; Token * newline_var; @@ -1106,14 +1106,14 @@ statement_newline_rule(Parser *p) (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - res = _PyPegen_singleton_seq ( p , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_singleton_seq ( p , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // simple_stmt asdl_seq* simple_stmt_var; @@ -1121,10 +1121,10 @@ 
statement_newline_rule(Parser *p) (simple_stmt_var = simple_stmt_rule(p)) // simple_stmt ) { - res = simple_stmt_var; + _res = simple_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // NEWLINE Token * newline_var; @@ -1132,22 +1132,22 @@ statement_newline_rule(Parser *p) (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _PyPegen_singleton_seq ( p , CHECK ( _Py_Pass ( EXTRA ) ) ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _PyPegen_singleton_seq ( p , CHECK ( _Py_Pass ( EXTRA ) ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // $ Token * endmarker_var; @@ -1155,18 +1155,18 @@ statement_newline_rule(Parser *p) (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' ) { - res = _PyPegen_interactive_exit ( p ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_interactive_exit ( p ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // simple_stmt: small_stmt !';' NEWLINE | ';'.small_stmt+ ';'? 
NEWLINE @@ -1176,8 +1176,8 @@ simple_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // small_stmt !';' NEWLINE stmt_ty a; Token * newline_var; @@ -1189,40 +1189,40 @@ simple_stmt_rule(Parser *p) (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - res = _PyPegen_singleton_seq ( p , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_singleton_seq ( p , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ';'.small_stmt+ ';'? NEWLINE + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; Token * newline_var; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (a = _gather_12_rule(p)) // ';'.small_stmt+ && - (opt_var = _PyPegen_expect_token(p, 13), 1) // ';'? + (_opt_var = _PyPegen_expect_token(p, 13), 1) // ';'? && (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // small_stmt: @@ -1245,28 +1245,28 @@ small_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - if (_PyPegen_is_memoized(p, small_stmt_type, &res)) - return res; - int mark = p->mark; + stmt_ty _res = NULL; + if (_PyPegen_is_memoized(p, small_stmt_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = 
p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // assignment stmt_ty assignment_var; if ( (assignment_var = assignment_rule(p)) // assignment ) { - res = assignment_var; + _res = assignment_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // star_expressions expr_ty e; @@ -1274,22 +1274,22 @@ small_stmt_rule(Parser *p) (e = star_expressions_rule(p)) // star_expressions ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Expr ( e , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Expr ( e , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // &'return' return_stmt stmt_ty return_stmt_var; @@ -1299,10 +1299,10 @@ small_stmt_rule(Parser *p) (return_stmt_var = return_stmt_rule(p)) // return_stmt ) { - res = return_stmt_var; + _res = return_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &('import' | 'from') import_stmt stmt_ty import_stmt_var; @@ -1312,10 +1312,10 @@ small_stmt_rule(Parser *p) (import_stmt_var = import_stmt_rule(p)) // import_stmt ) { - res = import_stmt_var; + _res = import_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &'raise' raise_stmt stmt_ty raise_stmt_var; @@ -1325,33 +1325,33 @@ small_stmt_rule(Parser *p) (raise_stmt_var = 
raise_stmt_rule(p)) // raise_stmt ) { - res = raise_stmt_var; + _res = raise_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // 'pass' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 502)) // token='pass' + (_keyword = _PyPegen_expect_token(p, 502)) // token='pass' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Pass ( EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Pass ( EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // &'del' del_stmt stmt_ty del_stmt_var; @@ -1361,10 +1361,10 @@ small_stmt_rule(Parser *p) (del_stmt_var = del_stmt_rule(p)) // del_stmt ) { - res = del_stmt_var; + _res = del_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &'yield' yield_stmt stmt_ty yield_stmt_var; @@ -1374,10 +1374,10 @@ small_stmt_rule(Parser *p) (yield_stmt_var = yield_stmt_rule(p)) // yield_stmt ) { - res = yield_stmt_var; + _res = yield_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &'assert' assert_stmt stmt_ty assert_stmt_var; @@ -1387,56 +1387,56 @@ small_stmt_rule(Parser *p) (assert_stmt_var = assert_stmt_rule(p)) // assert_stmt ) { - res = assert_stmt_var; + _res = assert_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // 'break' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 506)) // token='break' + 
(_keyword = _PyPegen_expect_token(p, 506)) // token='break' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Break ( EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Break ( EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'continue' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 507)) // token='continue' + (_keyword = _PyPegen_expect_token(p, 507)) // token='continue' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Continue ( EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Continue ( EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // &'global' global_stmt stmt_ty global_stmt_var; @@ -1446,10 +1446,10 @@ small_stmt_rule(Parser *p) (global_stmt_var = 
global_stmt_rule(p)) // global_stmt ) { - res = global_stmt_var; + _res = global_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &'nonlocal' nonlocal_stmt stmt_ty nonlocal_stmt_var; @@ -1459,15 +1459,15 @@ small_stmt_rule(Parser *p) (nonlocal_stmt_var = nonlocal_stmt_rule(p)) // nonlocal_stmt ) { - res = nonlocal_stmt_var; + _res = nonlocal_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, small_stmt_type, res); - return res; + _PyPegen_insert_memo(p, _mark, small_stmt_type, _res); + return _res; } // compound_stmt: @@ -1484,8 +1484,8 @@ compound_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; { // &('def' | '@' | ASYNC) function_def stmt_ty function_def_var; if ( @@ -1494,10 +1494,10 @@ compound_stmt_rule(Parser *p) (function_def_var = function_def_rule(p)) // function_def ) { - res = function_def_var; + _res = function_def_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &'if' if_stmt stmt_ty if_stmt_var; @@ -1507,10 +1507,10 @@ compound_stmt_rule(Parser *p) (if_stmt_var = if_stmt_rule(p)) // if_stmt ) { - res = if_stmt_var; + _res = if_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &('class' | '@') class_def stmt_ty class_def_var; @@ -1520,10 +1520,10 @@ compound_stmt_rule(Parser *p) (class_def_var = class_def_rule(p)) // class_def ) { - res = class_def_var; + _res = class_def_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &('with' | ASYNC) with_stmt stmt_ty with_stmt_var; @@ -1533,10 +1533,10 @@ compound_stmt_rule(Parser *p) (with_stmt_var = with_stmt_rule(p)) // with_stmt ) { - res = with_stmt_var; + _res = with_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &('for' | ASYNC) for_stmt stmt_ty for_stmt_var; @@ -1546,10 +1546,10 @@ compound_stmt_rule(Parser *p) (for_stmt_var = for_stmt_rule(p)) // 
for_stmt ) { - res = for_stmt_var; + _res = for_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &'try' try_stmt stmt_ty try_stmt_var; @@ -1559,10 +1559,10 @@ compound_stmt_rule(Parser *p) (try_stmt_var = try_stmt_rule(p)) // try_stmt ) { - res = try_stmt_var; + _res = try_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &'while' while_stmt stmt_ty while_stmt_var; @@ -1572,14 +1572,14 @@ compound_stmt_rule(Parser *p) (while_stmt_var = while_stmt_rule(p)) // while_stmt ) { - res = while_stmt_var; + _res = while_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // assignment: @@ -1594,79 +1594,79 @@ assignment_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME ':' expression ['=' annotated_rhs] + Token * _literal; expr_ty a; expr_ty b; void *c; - Token * literal; if ( (a = _PyPegen_name_token(p)) // NAME && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = expression_rule(p)) // expression && (c = _tmp_19_rule(p), 1) // ['=' annotated_rhs] ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - 
UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = CHECK_VERSION ( 6 , "Variable annotation syntax is" , _Py_AnnAssign ( CHECK ( _PyPegen_set_expr_context ( p , a , Store ) ) , b , c , 1 , EXTRA ) ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 6 , "Variable annotation syntax is" , _Py_AnnAssign ( CHECK ( _PyPegen_set_expr_context ( p , a , Store ) ) , b , c , 1 , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ('(' inside_paren_ann_assign_target ')' | ann_assign_subscript_attribute_target) ':' expression ['=' annotated_rhs] + Token * _literal; void *a; expr_ty b; void *c; - Token * literal; if ( (a = _tmp_20_rule(p)) // '(' inside_paren_ann_assign_target ')' | ann_assign_subscript_attribute_target && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = expression_rule(p)) // expression && (c = _tmp_21_rule(p), 1) // ['=' annotated_rhs] ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = CHECK_VERSION ( 6 , "Variable annotations syntax is" , _Py_AnnAssign ( a , b , c , 0 , EXTRA ) ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; 
+ UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 6 , "Variable annotations syntax is" , _Py_AnnAssign ( a , b , c , 0 , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ((star_targets '='))+ (yield_expr | star_expressions) TYPE_COMMENT? asdl_seq * a; @@ -1680,22 +1680,22 @@ assignment_rule(Parser *p) (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Assign ( a , b , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Assign ( a , b , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // target augassign (yield_expr | star_expressions) expr_ty a; @@ -1709,22 +1709,22 @@ assignment_rule(Parser *p) (c = _tmp_24_rule(p)) // yield_expr | star_expressions ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_AugAssign ( a , b -> kind , c , EXTRA ); - if (res == NULL && 
PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_AugAssign ( a , b -> kind , c , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // invalid_assignment void *invalid_assignment_var; @@ -1732,14 +1732,14 @@ assignment_rule(Parser *p) (invalid_assignment_var = invalid_assignment_rule(p)) // invalid_assignment ) { - res = invalid_assignment_var; + _res = invalid_assignment_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // augassign: @@ -1762,206 +1762,206 @@ augassign_rule(Parser *p) if (p->error_indicator) { return NULL; } - AugOperator* res = NULL; - int mark = p->mark; + AugOperator* _res = NULL; + int _mark = p->mark; { // '+=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 36)) // token='+=' + (_literal = _PyPegen_expect_token(p, 36)) // token='+=' ) { - res = _PyPegen_augoperator ( p , Add ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , Add ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '-=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 37)) // token='-=' + (_literal = _PyPegen_expect_token(p, 37)) // token='-=' ) { - res = _PyPegen_augoperator ( p , Sub ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , Sub ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '*=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 38)) // token='*=' + (_literal = _PyPegen_expect_token(p, 38)) // 
token='*=' ) { - res = _PyPegen_augoperator ( p , Mult ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , Mult ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '@=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 50)) // token='@=' + (_literal = _PyPegen_expect_token(p, 50)) // token='@=' ) { - res = CHECK_VERSION ( 5 , "The '@' operator is" , _PyPegen_augoperator ( p , MatMult ) ); - if (res == NULL && PyErr_Occurred()) { + _res = CHECK_VERSION ( 5 , "The '@' operator is" , _PyPegen_augoperator ( p , MatMult ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '/=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 39)) // token='/=' + (_literal = _PyPegen_expect_token(p, 39)) // token='/=' ) { - res = _PyPegen_augoperator ( p , Div ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , Div ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '%=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 40)) // token='%=' + (_literal = _PyPegen_expect_token(p, 40)) // token='%=' ) { - res = _PyPegen_augoperator ( p , Mod ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , Mod ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '&=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 41)) // token='&=' + (_literal = _PyPegen_expect_token(p, 41)) // token='&=' ) { - res = _PyPegen_augoperator ( p , BitAnd ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , BitAnd ); + if (_res == NULL && 
PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '|=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 42)) // token='|=' + (_literal = _PyPegen_expect_token(p, 42)) // token='|=' ) { - res = _PyPegen_augoperator ( p , BitOr ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , BitOr ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '^=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 43)) // token='^=' + (_literal = _PyPegen_expect_token(p, 43)) // token='^=' ) { - res = _PyPegen_augoperator ( p , BitXor ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , BitXor ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '<<=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 44)) // token='<<=' + (_literal = _PyPegen_expect_token(p, 44)) // token='<<=' ) { - res = _PyPegen_augoperator ( p , LShift ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , LShift ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '>>=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 45)) // token='>>=' + (_literal = _PyPegen_expect_token(p, 45)) // token='>>=' ) { - res = _PyPegen_augoperator ( p , RShift ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , RShift ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '**=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 46)) // token='**=' + (_literal = 
_PyPegen_expect_token(p, 46)) // token='**=' ) { - res = _PyPegen_augoperator ( p , Pow ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , Pow ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '//=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 48)) // token='//=' + (_literal = _PyPegen_expect_token(p, 48)) // token='//=' ) { - res = _PyPegen_augoperator ( p , FloorDiv ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , FloorDiv ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // global_stmt: 'global' ','.NAME+ @@ -1971,45 +1971,45 @@ global_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'global' ','.NAME+ + Token * _keyword; asdl_seq * a; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 508)) // token='global' + (_keyword = _PyPegen_expect_token(p, 508)) // token='global' && (a = _gather_25_rule(p)) // ','.NAME+ ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int 
end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Global ( CHECK ( _PyPegen_map_names_to_ids ( p , a ) ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Global ( CHECK ( _PyPegen_map_names_to_ids ( p , a ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // nonlocal_stmt: 'nonlocal' ','.NAME+ @@ -2019,45 +2019,45 @@ nonlocal_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'nonlocal' ','.NAME+ + Token * _keyword; asdl_seq * a; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 509)) // token='nonlocal' + (_keyword = _PyPegen_expect_token(p, 509)) // token='nonlocal' && (a = _gather_27_rule(p)) // ','.NAME+ ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - 
UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Nonlocal ( CHECK ( _PyPegen_map_names_to_ids ( p , a ) ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Nonlocal ( CHECK ( _PyPegen_map_names_to_ids ( p , a ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // yield_stmt: yield_expr @@ -2067,42 +2067,42 @@ yield_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // yield_expr expr_ty y; if ( (y = yield_expr_rule(p)) // yield_expr ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Expr ( y , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int 
_end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Expr ( y , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // assert_stmt: 'assert' expression [',' expression] @@ -2112,48 +2112,48 @@ assert_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'assert' expression [',' expression] + Token * _keyword; expr_ty a; void *b; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 505)) // token='assert' + (_keyword = _PyPegen_expect_token(p, 505)) // token='assert' && (a = expression_rule(p)) // expression && (b = _tmp_29_rule(p), 1) // [',' expression] ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Assert ( a , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + 
UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Assert ( a , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // del_stmt: 'del' del_targets @@ -2163,45 +2163,45 @@ del_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'del' del_targets + Token * _keyword; asdl_seq* a; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 503)) // token='del' + (_keyword = _PyPegen_expect_token(p, 503)) // token='del' && (a = del_targets_rule(p)) // del_targets ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Delete ( a , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA 
macro + _res = _Py_Delete ( a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // import_stmt: import_name | import_from @@ -2211,18 +2211,18 @@ import_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; { // import_name stmt_ty import_name_var; if ( (import_name_var = import_name_rule(p)) // import_name ) { - res = import_name_var; + _res = import_name_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // import_from stmt_ty import_from_var; @@ -2230,14 +2230,14 @@ import_stmt_rule(Parser *p) (import_from_var = import_from_rule(p)) // import_from ) { - res = import_from_var; + _res = import_from_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // import_name: 'import' dotted_as_names @@ -2247,45 +2247,45 @@ import_name_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'import' dotted_as_names + Token * _keyword; asdl_seq* a; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 513)) // token='import' + (_keyword = _PyPegen_expect_token(p, 513)) // token='import' && (a = dotted_as_names_rule(p)) // 
dotted_as_names ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Import ( a , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Import ( a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // import_from: @@ -2297,86 +2297,86 @@ import_from_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'from' (('.' | '...'))* dotted_name 'import' import_from_targets + Token * _keyword; + Token * _keyword_1; asdl_seq * a; expr_ty b; asdl_seq* c; - Token * keyword; - Token * keyword_1; if ( - (keyword = _PyPegen_expect_token(p, 514)) // token='from' + (_keyword = _PyPegen_expect_token(p, 514)) // token='from' && (a = _loop0_30_rule(p)) // (('.' 
| '...'))* && (b = dotted_name_rule(p)) // dotted_name && - (keyword_1 = _PyPegen_expect_token(p, 513)) // token='import' + (_keyword_1 = _PyPegen_expect_token(p, 513)) // token='import' && (c = import_from_targets_rule(p)) // import_from_targets ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_ImportFrom ( b -> v . Name . id , c , _PyPegen_seq_count_dots ( a ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_ImportFrom ( b -> v . Name . id , c , _PyPegen_seq_count_dots ( a ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'from' (('.' | '...'))+ 'import' import_from_targets + Token * _keyword; + Token * _keyword_1; asdl_seq * a; asdl_seq* b; - Token * keyword; - Token * keyword_1; if ( - (keyword = _PyPegen_expect_token(p, 514)) // token='from' + (_keyword = _PyPegen_expect_token(p, 514)) // token='from' && (a = _loop1_31_rule(p)) // (('.' 
| '...'))+ && - (keyword_1 = _PyPegen_expect_token(p, 513)) // token='import' + (_keyword_1 = _PyPegen_expect_token(p, 513)) // token='import' && (b = import_from_targets_rule(p)) // import_from_targets ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_ImportFrom ( NULL , b , _PyPegen_seq_count_dots ( a ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_ImportFrom ( NULL , b , _PyPegen_seq_count_dots ( a ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // import_from_targets: '(' import_from_as_names ','? ')' | import_from_as_names | '*' @@ -2386,32 +2386,32 @@ import_from_targets_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // '(' import_from_as_names ','? ')' + Token * _literal; + Token * _literal_1; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq* a; - Token * literal; - Token * literal_1; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (a = import_from_as_names_rule(p)) // import_from_as_names && - (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
+ (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? && - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // import_from_as_names asdl_seq* import_from_as_names_var; @@ -2419,29 +2419,29 @@ import_from_targets_rule(Parser *p) (import_from_as_names_var = import_from_as_names_rule(p)) // import_from_as_names ) { - res = import_from_as_names_var; + _res = import_from_as_names_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // '*' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 16)) // token='*' + (_literal = _PyPegen_expect_token(p, 16)) // token='*' ) { - res = _PyPegen_singleton_seq ( p , CHECK ( _PyPegen_alias_for_star ( p ) ) ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_singleton_seq ( p , CHECK ( _PyPegen_alias_for_star ( p ) ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // import_from_as_names: ','.import_from_as_name+ @@ -2451,26 +2451,26 @@ import_from_as_names_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // ','.import_from_as_name+ asdl_seq * a; if ( (a = _gather_32_rule(p)) // ','.import_from_as_name+ ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // import_from_as_name: NAME ['as' NAME] @@ -2480,8 +2480,8 @@ import_from_as_name_rule(Parser *p) if 
(p->error_indicator) { return NULL; } - alias_ty res = NULL; - int mark = p->mark; + alias_ty _res = NULL; + int _mark = p->mark; { // NAME ['as' NAME] expr_ty a; void *b; @@ -2491,18 +2491,18 @@ import_from_as_name_rule(Parser *p) (b = _tmp_34_rule(p), 1) // ['as' NAME] ) { - res = _Py_alias ( a -> v . Name . id , ( b ) ? ( ( expr_ty ) b ) -> v . Name . id : NULL , p -> arena ); - if (res == NULL && PyErr_Occurred()) { + _res = _Py_alias ( a -> v . Name . id , ( b ) ? ( ( expr_ty ) b ) -> v . Name . id : NULL , p -> arena ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // dotted_as_names: ','.dotted_as_name+ @@ -2512,26 +2512,26 @@ dotted_as_names_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // ','.dotted_as_name+ asdl_seq * a; if ( (a = _gather_35_rule(p)) // ','.dotted_as_name+ ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // dotted_as_name: dotted_name ['as' NAME] @@ -2541,8 +2541,8 @@ dotted_as_name_rule(Parser *p) if (p->error_indicator) { return NULL; } - alias_ty res = NULL; - int mark = p->mark; + alias_ty _res = NULL; + int _mark = p->mark; { // dotted_name ['as' NAME] expr_ty a; void *b; @@ -2552,18 +2552,18 @@ dotted_as_name_rule(Parser *p) (b = _tmp_37_rule(p), 1) // ['as' NAME] ) { - res = _Py_alias ( a -> v . Name . id , ( b ) ? ( ( expr_ty ) b ) -> v . Name . id : NULL , p -> arena ); - if (res == NULL && PyErr_Occurred()) { + _res = _Py_alias ( a -> v . Name . id , ( b ) ? ( ( expr_ty ) b ) -> v . Name . 
id : NULL , p -> arena ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // Left-recursive @@ -2572,25 +2572,25 @@ static expr_ty dotted_name_raw(Parser *); static expr_ty dotted_name_rule(Parser *p) { - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, dotted_name_type, &res)) - return res; - int mark = p->mark; - int resmark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, dotted_name_type, &_res)) + return _res; + int _mark = p->mark; + int _resmark = p->mark; while (1) { - int tmpvar_0 = _PyPegen_update_memo(p, mark, dotted_name_type, res); + int tmpvar_0 = _PyPegen_update_memo(p, _mark, dotted_name_type, _res); if (tmpvar_0) { - return res; + return _res; } - p->mark = mark; - void *raw = dotted_name_raw(p); - if (raw == NULL || p->mark <= resmark) + p->mark = _mark; + void *_raw = dotted_name_raw(p); + if (_raw == NULL || p->mark <= _resmark) break; - resmark = p->mark; - res = raw; + _resmark = p->mark; + _res = _raw; } - p->mark = resmark; - return res; + p->mark = _resmark; + return _res; } static expr_ty dotted_name_raw(Parser *p) @@ -2598,28 +2598,28 @@ dotted_name_raw(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; { // dotted_name '.' NAME + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = dotted_name_rule(p)) // dotted_name && - (literal = _PyPegen_expect_token(p, 23)) // token='.' + (_literal = _PyPegen_expect_token(p, 23)) // token='.' 
&& (b = _PyPegen_name_token(p)) // NAME ) { - res = _PyPegen_join_names_with_dot ( p , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_join_names_with_dot ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // NAME expr_ty name_var; @@ -2627,14 +2627,14 @@ dotted_name_raw(Parser *p) (name_var = _PyPegen_name_token(p)) // NAME ) { - res = name_var; + _res = name_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // if_stmt: @@ -2646,89 +2646,89 @@ if_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'if' named_expression ':' block elif_stmt + Token * _keyword; + Token * _literal; expr_ty a; asdl_seq* b; stmt_ty c; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 510)) // token='if' + (_keyword = _PyPegen_expect_token(p, 510)) // token='if' && (a = named_expression_rule(p)) // named_expression && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = block_rule(p)) // block && (c = elif_stmt_rule(p)) // elif_stmt ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token 
== NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_If ( a , b , CHECK ( _PyPegen_singleton_seq ( p , c ) ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_If ( a , b , CHECK ( _PyPegen_singleton_seq ( p , c ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'if' named_expression ':' block else_block? + Token * _keyword; + Token * _literal; expr_ty a; asdl_seq* b; void *c; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 510)) // token='if' + (_keyword = _PyPegen_expect_token(p, 510)) // token='if' && (a = named_expression_rule(p)) // named_expression && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = block_rule(p)) // block && (c = else_block_rule(p), 1) // else_block? 
) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_If ( a , b , c , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_If ( a , b , c , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // elif_stmt: @@ -2740,89 +2740,89 @@ elif_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'elif' named_expression ':' block elif_stmt + Token * _keyword; + Token * _literal; expr_ty a; asdl_seq* b; stmt_ty c; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 515)) // token='elif' + (_keyword = _PyPegen_expect_token(p, 515)) // token='elif' && (a = named_expression_rule(p)) // named_expression && - (literal = _PyPegen_expect_token(p, 11)) // token=':' 
+ (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = block_rule(p)) // block && (c = elif_stmt_rule(p)) // elif_stmt ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_If ( a , b , CHECK ( _PyPegen_singleton_seq ( p , c ) ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_If ( a , b , CHECK ( _PyPegen_singleton_seq ( p , c ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'elif' named_expression ':' block else_block? + Token * _keyword; + Token * _literal; expr_ty a; asdl_seq* b; void *c; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 515)) // token='elif' + (_keyword = _PyPegen_expect_token(p, 515)) // token='elif' && (a = named_expression_rule(p)) // named_expression && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = block_rule(p)) // block && (c = else_block_rule(p), 1) // else_block? 
) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_If ( a , b , c , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_If ( a , b , c , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // else_block: 'else' ':' block @@ -2832,32 +2832,32 @@ else_block_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // 'else' ':' block + Token * _keyword; + Token * _literal; asdl_seq* b; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 516)) // token='else' + (_keyword = _PyPegen_expect_token(p, 516)) // token='else' && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = block_rule(p)) // block ) { - res = b; - if (res == NULL && PyErr_Occurred()) { + _res = b; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // while_stmt: 'while' named_expression ':' block else_block? 
@@ -2867,54 +2867,54 @@ while_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'while' named_expression ':' block else_block? + Token * _keyword; + Token * _literal; expr_ty a; asdl_seq* b; void *c; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 512)) // token='while' + (_keyword = _PyPegen_expect_token(p, 512)) // token='while' && (a = named_expression_rule(p)) // named_expression && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = block_rule(p)) // block && (c = else_block_rule(p), 1) // else_block? 
) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_While ( a , b , c , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_While ( a , b , c , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // for_stmt: @@ -2926,35 +2926,35 @@ for_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block? 
+ Token * _keyword; + Token * _keyword_1; + Token * _literal; asdl_seq* b; void *el; expr_ty ex; - Token * keyword; - Token * keyword_1; - Token * literal; expr_ty t; void *tc; if ( - (keyword = _PyPegen_expect_token(p, 517)) // token='for' + (_keyword = _PyPegen_expect_token(p, 517)) // token='for' && (t = star_targets_rule(p)) // star_targets && - (keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' && (ex = star_expressions_rule(p)) // star_expressions && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? && @@ -2963,45 +2963,45 @@ for_stmt_rule(Parser *p) (el = else_block_rule(p), 1) // else_block? ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_For ( t , ex , b , el , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_For ( t , ex , b , el , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ASYNC 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block? 
+ Token * _keyword; + Token * _keyword_1; + Token * _literal; Token * async_var; asdl_seq* b; void *el; expr_ty ex; - Token * keyword; - Token * keyword_1; - Token * literal; expr_ty t; void *tc; if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (keyword = _PyPegen_expect_token(p, 517)) // token='for' + (_keyword = _PyPegen_expect_token(p, 517)) // token='for' && (t = star_targets_rule(p)) // star_targets && - (keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' && (ex = star_expressions_rule(p)) // star_expressions && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? && @@ -3010,26 +3010,26 @@ for_stmt_rule(Parser *p) (el = else_block_rule(p), 1) // else_block? ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = CHECK_VERSION ( 5 , "Async for loops are" , _Py_AsyncFor ( t , ex , b , el , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ) ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 5 , "Async for loops are" , _Py_AsyncFor ( t , ex , b , el , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // with_stmt: @@ -3043,179 +3043,179 @@ 
with_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'with' '(' ','.with_item+ ','? ')' ':' block + Token * _keyword; + Token * _literal; + Token * _literal_1; + Token * _literal_2; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; asdl_seq* b; - Token * keyword; - Token * literal; - Token * literal_1; - Token * literal_2; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( - (keyword = _PyPegen_expect_token(p, 519)) // token='with' + (_keyword = _PyPegen_expect_token(p, 519)) // token='with' && - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (a = _gather_38_rule(p)) // ','.with_item+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
&& - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && - (literal_2 = _PyPegen_expect_token(p, 11)) // token=':' + (_literal_2 = _PyPegen_expect_token(p, 11)) // token=':' && (b = block_rule(p)) // block ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_With ( a , b , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_With ( a , b , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'with' ','.with_item+ ':' TYPE_COMMENT? block + Token * _keyword; + Token * _literal; asdl_seq * a; asdl_seq* b; - Token * keyword; - Token * literal; void *tc; if ( - (keyword = _PyPegen_expect_token(p, 519)) // token='with' + (_keyword = _PyPegen_expect_token(p, 519)) // token='with' && (a = _gather_40_rule(p)) // ','.with_item+ && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? 
&& (b = block_rule(p)) // block ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_With ( a , b , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_With ( a , b , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block + Token * _keyword; + Token * _literal; + Token * _literal_1; + Token * _literal_2; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; Token * async_var; asdl_seq* b; - Token * keyword; - Token * literal; - Token * literal_1; - Token * literal_2; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (keyword = _PyPegen_expect_token(p, 519)) // token='with' + (_keyword = _PyPegen_expect_token(p, 519)) // token='with' && - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (a = _gather_42_rule(p)) // ','.with_item+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
&& - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && - (literal_2 = _PyPegen_expect_token(p, 11)) // token=':' + (_literal_2 = _PyPegen_expect_token(p, 11)) // token=':' && (b = block_rule(p)) // block ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = CHECK_VERSION ( 5 , "Async with statements are" , _Py_AsyncWith ( a , b , NULL , EXTRA ) ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 5 , "Async with statements are" , _Py_AsyncWith ( a , b , NULL , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? block + Token * _keyword; + Token * _literal; asdl_seq * a; Token * async_var; asdl_seq* b; - Token * keyword; - Token * literal; void *tc; if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (keyword = _PyPegen_expect_token(p, 519)) // token='with' + (_keyword = _PyPegen_expect_token(p, 519)) // token='with' && (a = _gather_44_rule(p)) // ','.with_item+ && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? 
&& (b = block_rule(p)) // block ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = CHECK_VERSION ( 5 , "Async with statements are" , _Py_AsyncWith ( a , b , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ) ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 5 , "Async with statements are" , _Py_AsyncWith ( a , b , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // with_item: expression ['as' target] @@ -3225,8 +3225,8 @@ with_item_rule(Parser *p) if (p->error_indicator) { return NULL; } - withitem_ty res = NULL; - int mark = p->mark; + withitem_ty _res = NULL; + int _mark = p->mark; { // expression ['as' target] expr_ty e; void *o; @@ -3236,18 +3236,18 @@ with_item_rule(Parser *p) (o = _tmp_46_rule(p), 1) // ['as' target] ) { - res = _Py_withitem ( e , o , p -> arena ); - if (res == NULL && PyErr_Occurred()) { + _res = _Py_withitem ( e , o , p -> arena ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // try_stmt: @@ -3259,59 +3259,59 @@ try_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == 
p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'try' ':' block finally_block + Token * _keyword; + Token * _literal; asdl_seq* b; asdl_seq* f; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 511)) // token='try' + (_keyword = _PyPegen_expect_token(p, 511)) // token='try' && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = block_rule(p)) // block && (f = finally_block_rule(p)) // finally_block ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Try ( b , NULL , NULL , f , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Try ( b , NULL , NULL , f , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'try' ':' block except_block+ else_block? finally_block? 
+ Token * _keyword; + Token * _literal; asdl_seq* b; void *el; asdl_seq * ex; void *f; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 511)) // token='try' + (_keyword = _PyPegen_expect_token(p, 511)) // token='try' && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = block_rule(p)) // block && @@ -3322,26 +3322,26 @@ try_stmt_rule(Parser *p) (f = finally_block_rule(p), 1) // finally_block? ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Try ( b , ex , el , f , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Try ( b , ex , el , f , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // except_block: 'except' expression ['as' target] ':' block | 'except' ':' block @@ -3351,83 +3351,83 @@ except_block_rule(Parser *p) if (p->error_indicator) { return NULL; } - excepthandler_ty res = NULL; - int mark = p->mark; + excepthandler_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int 
_start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'except' expression ['as' target] ':' block + Token * _keyword; + Token * _literal; asdl_seq* b; expr_ty e; - Token * keyword; - Token * literal; void *t; if ( - (keyword = _PyPegen_expect_token(p, 520)) // token='except' + (_keyword = _PyPegen_expect_token(p, 520)) // token='except' && (e = expression_rule(p)) // expression && (t = _tmp_48_rule(p), 1) // ['as' target] && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = block_rule(p)) // block ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_ExceptHandler ( e , ( t ) ? ( ( expr_ty ) t ) -> v . Name . id : NULL , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_ExceptHandler ( e , ( t ) ? ( ( expr_ty ) t ) -> v . Name . 
id : NULL , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'except' ':' block + Token * _keyword; + Token * _literal; asdl_seq* b; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 520)) // token='except' + (_keyword = _PyPegen_expect_token(p, 520)) // token='except' && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = block_rule(p)) // block ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_ExceptHandler ( NULL , NULL , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_ExceptHandler ( NULL , NULL , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // finally_block: 'finally' ':' block @@ -3437,32 +3437,32 @@ finally_block_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // 'finally' ':' block + Token * _keyword; + Token * _literal; asdl_seq* a; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 521)) // token='finally' + (_keyword = _PyPegen_expect_token(p, 521)) // token='finally' && - (literal = _PyPegen_expect_token(p, 11)) // 
token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (a = block_rule(p)) // block ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // return_stmt: 'return' star_expressions? @@ -3472,45 +3472,45 @@ return_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'return' star_expressions? + Token * _keyword; void *a; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 500)) // token='return' + (_keyword = _PyPegen_expect_token(p, 500)) // token='return' && (a = star_expressions_rule(p), 1) // star_expressions? 
) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Return ( a , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Return ( a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // raise_stmt: 'raise' expression ['from' expression] | 'raise' @@ -3520,71 +3520,71 @@ raise_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'raise' expression ['from' expression] + Token * _keyword; expr_ty a; void *b; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 501)) // token='raise' + (_keyword = _PyPegen_expect_token(p, 501)) // token='raise' && (a = expression_rule(p)) // expression && (b = _tmp_49_rule(p), 1) // ['from' expression] ) { - Token *token = 
_PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Raise ( a , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Raise ( a , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'raise' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 501)) // token='raise' + (_keyword = _PyPegen_expect_token(p, 501)) // token='raise' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Raise ( NULL , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Raise ( NULL , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // function_def: decorators function_def_raw | function_def_raw @@ -3594,8 +3594,8 @@ function_def_rule(Parser *p) if 
(p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; { // decorators function_def_raw asdl_seq* d; stmt_ty f; @@ -3605,14 +3605,14 @@ function_def_rule(Parser *p) (f = function_def_raw_rule(p)) // function_def_raw ) { - res = _PyPegen_function_def_decorators ( p , d , f ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_function_def_decorators ( p , d , f ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // function_def_raw stmt_ty function_def_raw_var; @@ -3620,14 +3620,14 @@ function_def_rule(Parser *p) (function_def_raw_var = function_def_raw_rule(p)) // function_def_raw ) { - res = function_def_raw_var; + _res = function_def_raw_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // function_def_raw: @@ -3639,116 +3639,116 @@ function_def_raw_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? 
block + Token * _keyword; + Token * _literal; + Token * _literal_1; + Token * _literal_2; void *a; asdl_seq* b; - Token * keyword; - Token * literal; - Token * literal_1; - Token * literal_2; expr_ty n; void *params; void *tc; if ( - (keyword = _PyPegen_expect_token(p, 522)) // token='def' + (_keyword = _PyPegen_expect_token(p, 522)) // token='def' && (n = _PyPegen_name_token(p)) // NAME && - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (params = params_rule(p), 1) // params? && - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && (a = _tmp_50_rule(p), 1) // ['->' expression] && - (literal_2 = _PyPegen_expect_token(p, 11)) // token=':' + (_literal_2 = _PyPegen_expect_token(p, 11)) // token=':' && (tc = func_type_comment_rule(p), 1) // func_type_comment? && (b = block_rule(p)) // block ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_FunctionDef ( n -> v . Name . id , ( params ) ? params : CHECK ( _PyPegen_empty_arguments ( p ) ) , b , NULL , a , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_FunctionDef ( n -> v . Name . id , ( params ) ? 
params : CHECK ( _PyPegen_empty_arguments ( p ) ) , b , NULL , a , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ASYNC 'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? block + Token * _keyword; + Token * _literal; + Token * _literal_1; + Token * _literal_2; void *a; Token * async_var; asdl_seq* b; - Token * keyword; - Token * literal; - Token * literal_1; - Token * literal_2; expr_ty n; void *params; void *tc; if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (keyword = _PyPegen_expect_token(p, 522)) // token='def' + (_keyword = _PyPegen_expect_token(p, 522)) // token='def' && (n = _PyPegen_name_token(p)) // NAME && - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (params = params_rule(p), 1) // params? && - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && (a = _tmp_51_rule(p), 1) // ['->' expression] && - (literal_2 = _PyPegen_expect_token(p, 11)) // token=':' + (_literal_2 = _PyPegen_expect_token(p, 11)) // token=':' && (tc = func_type_comment_rule(p), 1) // func_type_comment? && (b = block_rule(p)) // block ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = CHECK_VERSION ( 5 , "Async functions are" , _Py_AsyncFunctionDef ( n -> v . Name . id , ( params ) ? 
params : CHECK ( _PyPegen_empty_arguments ( p ) ) , b , NULL , a , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ) ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 5 , "Async functions are" , _Py_AsyncFunctionDef ( n -> v . Name . id , ( params ) ? params : CHECK ( _PyPegen_empty_arguments ( p ) ) , b , NULL , a , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // func_type_comment: @@ -3761,8 +3761,8 @@ func_type_comment_rule(Parser *p) if (p->error_indicator) { return NULL; } - Token* res = NULL; - int mark = p->mark; + Token* _res = NULL; + int _mark = p->mark; { // NEWLINE TYPE_COMMENT &(NEWLINE INDENT) Token * newline_var; Token * t; @@ -3774,14 +3774,14 @@ func_type_comment_rule(Parser *p) _PyPegen_lookahead(1, _tmp_52_rule, p) ) { - res = t; - if (res == NULL && PyErr_Occurred()) { + _res = t; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // invalid_double_type_comments void *invalid_double_type_comments_var; @@ -3789,10 +3789,10 @@ func_type_comment_rule(Parser *p) (invalid_double_type_comments_var = invalid_double_type_comments_rule(p)) // invalid_double_type_comments ) { - res = invalid_double_type_comments_var; + _res = invalid_double_type_comments_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // TYPE_COMMENT Token * type_comment_var; @@ -3800,14 +3800,14 @@ func_type_comment_rule(Parser *p) (type_comment_var = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' ) { - res = type_comment_var; + _res = type_comment_var; goto done; } - p->mark = mark; + 
p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // params: invalid_parameters | parameters @@ -3817,18 +3817,18 @@ params_rule(Parser *p) if (p->error_indicator) { return NULL; } - arguments_ty res = NULL; - int mark = p->mark; + arguments_ty _res = NULL; + int _mark = p->mark; { // invalid_parameters void *invalid_parameters_var; if ( (invalid_parameters_var = invalid_parameters_rule(p)) // invalid_parameters ) { - res = invalid_parameters_var; + _res = invalid_parameters_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // parameters arguments_ty parameters_var; @@ -3836,14 +3836,14 @@ params_rule(Parser *p) (parameters_var = parameters_rule(p)) // parameters ) { - res = parameters_var; + _res = parameters_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // parameters: @@ -3858,8 +3858,8 @@ parameters_rule(Parser *p) if (p->error_indicator) { return NULL; } - arguments_ty res = NULL; - int mark = p->mark; + arguments_ty _res = NULL; + int _mark = p->mark; { // slash_no_default param_no_default* param_with_default* star_etc? asdl_seq* a; asdl_seq * b; @@ -3875,14 +3875,14 @@ parameters_rule(Parser *p) (d = star_etc_rule(p), 1) // star_etc? ) { - res = _PyPegen_make_arguments ( p , a , NULL , b , c , d ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_arguments ( p , a , NULL , b , c , d ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // slash_with_default param_with_default* star_etc? SlashWithDefault* a; @@ -3896,14 +3896,14 @@ parameters_rule(Parser *p) (c = star_etc_rule(p), 1) // star_etc? 
) { - res = _PyPegen_make_arguments ( p , NULL , a , NULL , b , c ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_arguments ( p , NULL , a , NULL , b , c ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // param_no_default+ param_with_default* star_etc? asdl_seq * a; @@ -3917,14 +3917,14 @@ parameters_rule(Parser *p) (c = star_etc_rule(p), 1) // star_etc? ) { - res = _PyPegen_make_arguments ( p , NULL , NULL , a , b , c ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_arguments ( p , NULL , NULL , a , b , c ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // param_with_default+ star_etc? asdl_seq * a; @@ -3935,14 +3935,14 @@ parameters_rule(Parser *p) (b = star_etc_rule(p), 1) // star_etc? ) { - res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // star_etc StarEtc* a; @@ -3950,18 +3950,18 @@ parameters_rule(Parser *p) (a = star_etc_rule(p)) // star_etc ) { - res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , NULL , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , NULL , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // slash_no_default: param_no_default+ '/' ',' | param_no_default+ '/' &')' @@ -3971,52 +3971,52 @@ slash_no_default_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { 
// param_no_default+ '/' ',' + Token * _literal; + Token * _literal_1; asdl_seq * a; - Token * literal; - Token * literal_1; if ( (a = _loop1_59_rule(p)) // param_no_default+ && - (literal = _PyPegen_expect_token(p, 17)) // token='/' + (_literal = _PyPegen_expect_token(p, 17)) // token='/' && - (literal_1 = _PyPegen_expect_token(p, 12)) // token=',' + (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // param_no_default+ '/' &')' + Token * _literal; asdl_seq * a; - Token * literal; if ( (a = _loop1_60_rule(p)) // param_no_default+ && - (literal = _PyPegen_expect_token(p, 17)) // token='/' + (_literal = _PyPegen_expect_token(p, 17)) // token='/' && _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // slash_with_default: @@ -4028,58 +4028,58 @@ slash_with_default_rule(Parser *p) if (p->error_indicator) { return NULL; } - SlashWithDefault* res = NULL; - int mark = p->mark; + SlashWithDefault* _res = NULL; + int _mark = p->mark; { // param_no_default* param_with_default+ '/' ',' + Token * _literal; + Token * _literal_1; asdl_seq * a; asdl_seq * b; - Token * literal; - Token * literal_1; if ( (a = _loop0_61_rule(p)) // param_no_default* && (b = _loop1_62_rule(p)) // param_with_default+ && - (literal = _PyPegen_expect_token(p, 17)) // token='/' + (_literal = _PyPegen_expect_token(p, 17)) // token='/' && - (literal_1 = _PyPegen_expect_token(p, 12)) // token=',' + (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' ) { - res = _PyPegen_slash_with_default ( p , a , b ); - if 
(res == NULL && PyErr_Occurred()) { + _res = _PyPegen_slash_with_default ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // param_no_default* param_with_default+ '/' &')' + Token * _literal; asdl_seq * a; asdl_seq * b; - Token * literal; if ( (a = _loop0_63_rule(p)) // param_no_default* && (b = _loop1_64_rule(p)) // param_with_default+ && - (literal = _PyPegen_expect_token(p, 17)) // token='/' + (_literal = _PyPegen_expect_token(p, 17)) // token='/' && _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { - res = _PyPegen_slash_with_default ( p , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_slash_with_default ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // star_etc: @@ -4093,15 +4093,15 @@ star_etc_rule(Parser *p) if (p->error_indicator) { return NULL; } - StarEtc* res = NULL; - int mark = p->mark; + StarEtc* _res = NULL; + int _mark = p->mark; { // '*' param_no_default param_maybe_default* kwds? + Token * _literal; arg_ty a; asdl_seq * b; void *c; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) // token='*' + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (a = param_no_default_rule(p)) // param_no_default && @@ -4110,38 +4110,38 @@ star_etc_rule(Parser *p) (c = kwds_rule(p), 1) // kwds? ) { - res = _PyPegen_star_etc ( p , a , b , c ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_star_etc ( p , a , b , c ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '*' ',' param_maybe_default+ kwds? 
+ Token * _literal; + Token * _literal_1; asdl_seq * b; void *c; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 16)) // token='*' + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (literal_1 = _PyPegen_expect_token(p, 12)) // token=',' + (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' && (b = _loop1_66_rule(p)) // param_maybe_default+ && (c = kwds_rule(p), 1) // kwds? ) { - res = _PyPegen_star_etc ( p , NULL , b , c ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_star_etc ( p , NULL , b , c ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // kwds arg_ty a; @@ -4149,14 +4149,14 @@ star_etc_rule(Parser *p) (a = kwds_rule(p)) // kwds ) { - res = _PyPegen_star_etc ( p , NULL , NULL , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_star_etc ( p , NULL , NULL , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // invalid_star_etc void *invalid_star_etc_var; @@ -4164,14 +4164,14 @@ star_etc_rule(Parser *p) (invalid_star_etc_var = invalid_star_etc_rule(p)) // invalid_star_etc ) { - res = invalid_star_etc_var; + _res = invalid_star_etc_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // kwds: '**' param_no_default @@ -4181,29 +4181,29 @@ kwds_rule(Parser *p) if (p->error_indicator) { return NULL; } - arg_ty res = NULL; - int mark = p->mark; + arg_ty _res = NULL; + int _mark = p->mark; { // '**' param_no_default + Token * _literal; arg_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 35)) // token='**' + (_literal = _PyPegen_expect_token(p, 35)) // token='**' && (a = param_no_default_rule(p)) // param_no_default ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { 
p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // param_no_default: param ',' TYPE_COMMENT? | param TYPE_COMMENT? &')' @@ -4213,28 +4213,28 @@ param_no_default_rule(Parser *p) if (p->error_indicator) { return NULL; } - arg_ty res = NULL; - int mark = p->mark; + arg_ty _res = NULL; + int _mark = p->mark; { // param ',' TYPE_COMMENT? + Token * _literal; arg_ty a; - Token * literal; void *tc; if ( (a = param_rule(p)) // param && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? ) { - res = _PyPegen_add_type_comment_to_arg ( p , a , tc ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_add_type_comment_to_arg ( p , a , tc ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // param TYPE_COMMENT? &')' arg_ty a; @@ -4247,18 +4247,18 @@ param_no_default_rule(Parser *p) _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { - res = _PyPegen_add_type_comment_to_arg ( p , a , tc ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_add_type_comment_to_arg ( p , a , tc ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // param_with_default: param default ',' TYPE_COMMENT? | param default TYPE_COMMENT? &')' @@ -4268,31 +4268,31 @@ param_with_default_rule(Parser *p) if (p->error_indicator) { return NULL; } - NameDefaultPair* res = NULL; - int mark = p->mark; + NameDefaultPair* _res = NULL; + int _mark = p->mark; { // param default ',' TYPE_COMMENT? 
+ Token * _literal; arg_ty a; expr_ty c; - Token * literal; void *tc; if ( (a = param_rule(p)) // param && (c = default_rule(p)) // default && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? ) { - res = _PyPegen_name_default_pair ( p , a , c , tc ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_name_default_pair ( p , a , c , tc ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // param default TYPE_COMMENT? &')' arg_ty a; @@ -4308,18 +4308,18 @@ param_with_default_rule(Parser *p) _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { - res = _PyPegen_name_default_pair ( p , a , c , tc ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_name_default_pair ( p , a , c , tc ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // param_maybe_default: @@ -4331,31 +4331,31 @@ param_maybe_default_rule(Parser *p) if (p->error_indicator) { return NULL; } - NameDefaultPair* res = NULL; - int mark = p->mark; + NameDefaultPair* _res = NULL; + int _mark = p->mark; { // param default? ',' TYPE_COMMENT? + Token * _literal; arg_ty a; void *c; - Token * literal; void *tc; if ( (a = param_rule(p)) // param && (c = default_rule(p), 1) // default? && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? 
) { - res = _PyPegen_name_default_pair ( p , a , c , tc ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_name_default_pair ( p , a , c , tc ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // param default? TYPE_COMMENT? &')' arg_ty a; @@ -4371,18 +4371,18 @@ param_maybe_default_rule(Parser *p) _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { - res = _PyPegen_name_default_pair ( p , a , c , tc ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_name_default_pair ( p , a , c , tc ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // param: NAME annotation? @@ -4392,16 +4392,16 @@ param_rule(Parser *p) if (p->error_indicator) { return NULL; } - arg_ty res = NULL; - int mark = p->mark; + arg_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME annotation? expr_ty a; void *b; @@ -4411,26 +4411,26 @@ param_rule(Parser *p) (b = annotation_rule(p), 1) // annotation? 
) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_arg ( a -> v . Name . id , b , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_arg ( a -> v . Name . id , b , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // annotation: ':' expression @@ -4440,29 +4440,29 @@ annotation_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; { // ':' expression + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (a = expression_rule(p)) // expression ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // default: '=' expression @@ -4472,29 +4472,29 @@ default_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; { // '=' expression + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 22)) // token='=' + (_literal = 
_PyPegen_expect_token(p, 22)) // token='=' && (a = expression_rule(p)) // expression ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // decorators: (('@' named_expression NEWLINE))+ @@ -4504,26 +4504,26 @@ decorators_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // (('@' named_expression NEWLINE))+ asdl_seq * a; if ( (a = _loop1_67_rule(p)) // (('@' named_expression NEWLINE))+ ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // class_def: decorators class_def_raw | class_def_raw @@ -4533,8 +4533,8 @@ class_def_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; { // decorators class_def_raw asdl_seq* a; stmt_ty b; @@ -4544,14 +4544,14 @@ class_def_rule(Parser *p) (b = class_def_raw_rule(p)) // class_def_raw ) { - res = _PyPegen_class_def_decorators ( p , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_class_def_decorators ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // class_def_raw stmt_ty class_def_raw_var; @@ -4559,14 +4559,14 @@ class_def_rule(Parser *p) (class_def_raw_var = class_def_raw_rule(p)) // class_def_raw ) { - res = class_def_raw_var; + _res = class_def_raw_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // class_def_raw: 
'class' NAME ['(' arguments? ')'] ':' block @@ -4576,54 +4576,54 @@ class_def_raw_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'class' NAME ['(' arguments? ')'] ':' block + Token * _keyword; + Token * _literal; expr_ty a; void *b; asdl_seq* c; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 523)) // token='class' + (_keyword = _PyPegen_expect_token(p, 523)) // token='class' && (a = _PyPegen_name_token(p)) // NAME && (b = _tmp_68_rule(p), 1) // ['(' arguments? ')'] && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (c = block_rule(p)) // block ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_ClassDef ( a -> v . Name . id , ( b ) ? ( ( expr_ty ) b ) -> v . Call . args : NULL , ( b ) ? ( ( expr_ty ) b ) -> v . Call . 
keywords : NULL , c , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_ClassDef ( a -> v . Name . id , ( b ) ? ( ( expr_ty ) b ) -> v . Call . args : NULL , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , c , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // block: NEWLINE INDENT statements DEDENT | simple_stmt | invalid_block @@ -4633,10 +4633,10 @@ block_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - if (_PyPegen_is_memoized(p, block_type, &res)) - return res; - int mark = p->mark; + asdl_seq* _res = NULL; + if (_PyPegen_is_memoized(p, block_type, &_res)) + return _res; + int _mark = p->mark; { // NEWLINE INDENT statements DEDENT asdl_seq* a; Token * dedent_var; @@ -4652,14 +4652,14 @@ block_rule(Parser *p) (dedent_var = _PyPegen_expect_token(p, DEDENT)) // token='DEDENT' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // simple_stmt asdl_seq* simple_stmt_var; @@ -4667,10 +4667,10 @@ block_rule(Parser *p) (simple_stmt_var = simple_stmt_rule(p)) // simple_stmt ) { - res = simple_stmt_var; + _res = simple_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // invalid_block void *invalid_block_var; @@ -4678,15 +4678,15 @@ block_rule(Parser *p) (invalid_block_var = invalid_block_rule(p)) // invalid_block ) { - res = invalid_block_var; + _res = invalid_block_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, 
block_type, res); - return res; + _PyPegen_insert_memo(p, _mark, block_type, _res); + return _res; } // expressions_list: ','.star_expression+ ','? @@ -4696,30 +4696,30 @@ expressions_list_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // ','.star_expression+ ','? + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (a = _gather_69_rule(p)) // ','.star_expression+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // star_expressions: @@ -4732,71 +4732,71 @@ star_expressions_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // star_expression ((',' star_expression))+ ','? 
+ void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings expr_ty a; asdl_seq * b; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (a = star_expression_rule(p)) // star_expression && (b = _loop1_71_rule(p)) // ((',' star_expression))+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Tuple ( CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // star_expression ',' + Token * _literal; expr_ty a; - Token * literal; if ( (a = star_expression_rule(p)) // star_expression && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Tuple ( CHECK ( _PyPegen_singleton_seq ( p , a ) ) , Load , 
EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( CHECK ( _PyPegen_singleton_seq ( p , a ) ) , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // star_expression expr_ty star_expression_var; @@ -4804,14 +4804,14 @@ star_expressions_rule(Parser *p) (star_expression_var = star_expression_rule(p)) // star_expression ) { - res = star_expression_var; + _res = star_expression_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // star_expression: '*' bitwise_or | expression @@ -4821,43 +4821,43 @@ star_expression_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, star_expression_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, star_expression_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '*' bitwise_or + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) // token='*' + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (a = bitwise_or_rule(p)) // bitwise_or ) { - Token *token = 
_PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Starred ( a , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Starred ( a , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // expression expr_ty expression_var; @@ -4865,15 +4865,15 @@ star_expression_rule(Parser *p) (expression_var = expression_rule(p)) // expression ) { - res = expression_var; + _res = expression_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, star_expression_type, res); - return res; + _PyPegen_insert_memo(p, _mark, star_expression_type, _res); + return _res; } // star_named_expressions: ','.star_named_expression+ ','? @@ -4883,30 +4883,30 @@ star_named_expressions_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // ','.star_named_expression+ ','? + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (a = _gather_72_rule(p)) // ','.star_named_expression+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // star_named_expression: '*' bitwise_or | named_expression @@ -4916,41 +4916,41 @@ star_named_expression_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '*' bitwise_or + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) // token='*' + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (a = bitwise_or_rule(p)) // bitwise_or ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Starred ( a , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Starred ( a , Load , EXTRA ); + if (_res == NULL && 
PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // named_expression expr_ty named_expression_var; @@ -4958,14 +4958,14 @@ star_named_expression_rule(Parser *p) (named_expression_var = named_expression_rule(p)) // named_expression ) { - res = named_expression_var; + _res = named_expression_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // named_expression: NAME ':=' expression | expression !':=' | invalid_named_expression @@ -4975,44 +4975,44 @@ named_expression_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME ':=' expression + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = _PyPegen_name_token(p)) // NAME && - (literal = _PyPegen_expect_token(p, 53)) // token=':=' + (_literal = _PyPegen_expect_token(p, 53)) // token=':=' && (b = expression_rule(p)) // expression ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_NamedExpr ( CHECK ( _PyPegen_set_expr_context ( p , 
a , Store ) ) , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_NamedExpr ( CHECK ( _PyPegen_set_expr_context ( p , a , Store ) ) , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // expression !':=' expr_ty expression_var; @@ -5022,10 +5022,10 @@ named_expression_rule(Parser *p) _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 53) // token=':=' ) { - res = expression_var; + _res = expression_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // invalid_named_expression void *invalid_named_expression_var; @@ -5033,14 +5033,14 @@ named_expression_rule(Parser *p) (invalid_named_expression_var = invalid_named_expression_rule(p)) // invalid_named_expression ) { - res = invalid_named_expression_var; + _res = invalid_named_expression_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // annotated_rhs: yield_expr | star_expressions @@ -5050,18 +5050,18 @@ annotated_rhs_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; { // yield_expr expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - res = yield_expr_var; + _res = yield_expr_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // star_expressions expr_ty star_expressions_var; @@ -5069,14 +5069,14 @@ annotated_rhs_rule(Parser *p) (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - res = star_expressions_var; + _res = star_expressions_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // expressions: 
expression ((',' expression))+ ','? | expression ',' | expression @@ -5086,71 +5086,71 @@ expressions_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // expression ((',' expression))+ ','? + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings expr_ty a; asdl_seq * b; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (a = expression_rule(p)) // expression && (b = _loop1_74_rule(p)) // ((',' expression))+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Tuple ( CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // expression ',' + Token * _literal; expr_ty a; - Token * literal; if ( (a = expression_rule(p)) // expression && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Tuple ( CHECK ( _PyPegen_singleton_seq ( p , a ) ) , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( CHECK ( _PyPegen_singleton_seq ( p , a ) ) , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; 
return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // expression expr_ty expression_var; @@ -5158,14 +5158,14 @@ expressions_rule(Parser *p) (expression_var = expression_rule(p)) // expression ) { - res = expression_var; + _res = expression_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // expression: disjunction 'if' disjunction 'else' expression | disjunction | lambdef @@ -5175,52 +5175,52 @@ expression_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, expression_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, expression_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // disjunction 'if' disjunction 'else' expression + Token * _keyword; + Token * _keyword_1; expr_ty a; expr_ty b; expr_ty c; - Token * keyword; - Token * keyword_1; if ( (a = disjunction_rule(p)) // disjunction && - (keyword = _PyPegen_expect_token(p, 510)) // token='if' + (_keyword = _PyPegen_expect_token(p, 510)) // token='if' && (b = disjunction_rule(p)) // disjunction && - (keyword_1 = _PyPegen_expect_token(p, 516)) // token='else' + (_keyword_1 = _PyPegen_expect_token(p, 516)) // token='else' && (c = expression_rule(p)) // expression ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); 
+ if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_IfExp ( b , a , c , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_IfExp ( b , a , c , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // disjunction expr_ty disjunction_var; @@ -5228,10 +5228,10 @@ expression_rule(Parser *p) (disjunction_var = disjunction_rule(p)) // disjunction ) { - res = disjunction_var; + _res = disjunction_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // lambdef expr_ty lambdef_var; @@ -5239,15 +5239,15 @@ expression_rule(Parser *p) (lambdef_var = lambdef_rule(p)) // lambdef ) { - res = lambdef_var; + _res = lambdef_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, expression_type, res); - return res; + _PyPegen_insert_memo(p, _mark, expression_type, _res); + return _res; } // lambdef: 'lambda' lambda_parameters? 
':' expression @@ -5257,51 +5257,51 @@ lambdef_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'lambda' lambda_parameters? ':' expression + Token * _keyword; + Token * _literal; void *a; expr_ty b; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 524)) // token='lambda' + (_keyword = _PyPegen_expect_token(p, 524)) // token='lambda' && (a = lambda_parameters_rule(p), 1) // lambda_parameters? && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = expression_rule(p)) // expression ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Lambda ( ( a ) ? a : CHECK ( _PyPegen_empty_arguments ( p ) ) , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Lambda ( ( a ) ? 
a : CHECK ( _PyPegen_empty_arguments ( p ) ) , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lambda_parameters: @@ -5316,8 +5316,8 @@ lambda_parameters_rule(Parser *p) if (p->error_indicator) { return NULL; } - arguments_ty res = NULL; - int mark = p->mark; + arguments_ty _res = NULL; + int _mark = p->mark; { // lambda_slash_no_default lambda_param_no_default* lambda_param_with_default* lambda_star_etc? asdl_seq* a; asdl_seq * b; @@ -5333,14 +5333,14 @@ lambda_parameters_rule(Parser *p) (d = lambda_star_etc_rule(p), 1) // lambda_star_etc? ) { - res = _PyPegen_make_arguments ( p , a , NULL , b , c , d ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_arguments ( p , a , NULL , b , c , d ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // lambda_slash_with_default lambda_param_with_default* lambda_star_etc? SlashWithDefault* a; @@ -5354,14 +5354,14 @@ lambda_parameters_rule(Parser *p) (c = lambda_star_etc_rule(p), 1) // lambda_star_etc? ) { - res = _PyPegen_make_arguments ( p , NULL , a , NULL , b , c ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_arguments ( p , NULL , a , NULL , b , c ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // lambda_param_no_default+ lambda_param_with_default* lambda_star_etc? asdl_seq * a; @@ -5375,14 +5375,14 @@ lambda_parameters_rule(Parser *p) (c = lambda_star_etc_rule(p), 1) // lambda_star_etc? 
) { - res = _PyPegen_make_arguments ( p , NULL , NULL , a , b , c ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_arguments ( p , NULL , NULL , a , b , c ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // lambda_param_with_default+ lambda_star_etc? asdl_seq * a; @@ -5393,14 +5393,14 @@ lambda_parameters_rule(Parser *p) (b = lambda_star_etc_rule(p), 1) // lambda_star_etc? ) { - res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // lambda_star_etc StarEtc* a; @@ -5408,18 +5408,18 @@ lambda_parameters_rule(Parser *p) (a = lambda_star_etc_rule(p)) // lambda_star_etc ) { - res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , NULL , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , NULL , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lambda_slash_no_default: @@ -5431,52 +5431,52 @@ lambda_slash_no_default_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // lambda_param_no_default+ '/' ',' + Token * _literal; + Token * _literal_1; asdl_seq * a; - Token * literal; - Token * literal_1; if ( (a = _loop1_81_rule(p)) // lambda_param_no_default+ && - (literal = _PyPegen_expect_token(p, 17)) // token='/' + (_literal = _PyPegen_expect_token(p, 17)) // token='/' && - (literal_1 = _PyPegen_expect_token(p, 12)) // token=',' + (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' ) { - res = a; - if 
(res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // lambda_param_no_default+ '/' &':' + Token * _literal; asdl_seq * a; - Token * literal; if ( (a = _loop1_82_rule(p)) // lambda_param_no_default+ && - (literal = _PyPegen_expect_token(p, 17)) // token='/' + (_literal = _PyPegen_expect_token(p, 17)) // token='/' && _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lambda_slash_with_default: @@ -5488,58 +5488,58 @@ lambda_slash_with_default_rule(Parser *p) if (p->error_indicator) { return NULL; } - SlashWithDefault* res = NULL; - int mark = p->mark; + SlashWithDefault* _res = NULL; + int _mark = p->mark; { // lambda_param_no_default* lambda_param_with_default+ '/' ',' + Token * _literal; + Token * _literal_1; asdl_seq * a; asdl_seq * b; - Token * literal; - Token * literal_1; if ( (a = _loop0_83_rule(p)) // lambda_param_no_default* && (b = _loop1_84_rule(p)) // lambda_param_with_default+ && - (literal = _PyPegen_expect_token(p, 17)) // token='/' + (_literal = _PyPegen_expect_token(p, 17)) // token='/' && - (literal_1 = _PyPegen_expect_token(p, 12)) // token=',' + (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' ) { - res = _PyPegen_slash_with_default ( p , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_slash_with_default ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // lambda_param_no_default* lambda_param_with_default+ '/' &':' + Token * _literal; asdl_seq * a; asdl_seq * b; - Token * literal; if ( (a = 
_loop0_85_rule(p)) // lambda_param_no_default* && (b = _loop1_86_rule(p)) // lambda_param_with_default+ && - (literal = _PyPegen_expect_token(p, 17)) // token='/' + (_literal = _PyPegen_expect_token(p, 17)) // token='/' && _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' ) { - res = _PyPegen_slash_with_default ( p , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_slash_with_default ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lambda_star_etc: @@ -5553,15 +5553,15 @@ lambda_star_etc_rule(Parser *p) if (p->error_indicator) { return NULL; } - StarEtc* res = NULL; - int mark = p->mark; + StarEtc* _res = NULL; + int _mark = p->mark; { // '*' lambda_param_no_default lambda_param_maybe_default* lambda_kwds? + Token * _literal; arg_ty a; asdl_seq * b; void *c; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) // token='*' + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (a = lambda_param_no_default_rule(p)) // lambda_param_no_default && @@ -5570,38 +5570,38 @@ lambda_star_etc_rule(Parser *p) (c = lambda_kwds_rule(p), 1) // lambda_kwds? ) { - res = _PyPegen_star_etc ( p , a , b , c ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_star_etc ( p , a , b , c ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '*' ',' lambda_param_maybe_default+ lambda_kwds? 
+ Token * _literal; + Token * _literal_1; asdl_seq * b; void *c; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 16)) // token='*' + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (literal_1 = _PyPegen_expect_token(p, 12)) // token=',' + (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' && (b = _loop1_88_rule(p)) // lambda_param_maybe_default+ && (c = lambda_kwds_rule(p), 1) // lambda_kwds? ) { - res = _PyPegen_star_etc ( p , NULL , b , c ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_star_etc ( p , NULL , b , c ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // lambda_kwds arg_ty a; @@ -5609,14 +5609,14 @@ lambda_star_etc_rule(Parser *p) (a = lambda_kwds_rule(p)) // lambda_kwds ) { - res = _PyPegen_star_etc ( p , NULL , NULL , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_star_etc ( p , NULL , NULL , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // invalid_lambda_star_etc void *invalid_lambda_star_etc_var; @@ -5624,14 +5624,14 @@ lambda_star_etc_rule(Parser *p) (invalid_lambda_star_etc_var = invalid_lambda_star_etc_rule(p)) // invalid_lambda_star_etc ) { - res = invalid_lambda_star_etc_var; + _res = invalid_lambda_star_etc_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lambda_kwds: '**' lambda_param_no_default @@ -5641,29 +5641,29 @@ lambda_kwds_rule(Parser *p) if (p->error_indicator) { return NULL; } - arg_ty res = NULL; - int mark = p->mark; + arg_ty _res = NULL; + int _mark = p->mark; { // '**' lambda_param_no_default + Token * _literal; arg_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 35)) // token='**' + (_literal = _PyPegen_expect_token(p, 35)) // token='**' && (a = 
lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lambda_param_no_default: lambda_param ',' | lambda_param &':' @@ -5673,25 +5673,25 @@ lambda_param_no_default_rule(Parser *p) if (p->error_indicator) { return NULL; } - arg_ty res = NULL; - int mark = p->mark; + arg_ty _res = NULL; + int _mark = p->mark; { // lambda_param ',' + Token * _literal; arg_ty a; - Token * literal; if ( (a = lambda_param_rule(p)) // lambda_param && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // lambda_param &':' arg_ty a; @@ -5701,18 +5701,18 @@ lambda_param_no_default_rule(Parser *p) _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lambda_param_with_default: lambda_param default ',' | lambda_param default &':' @@ -5722,28 +5722,28 @@ lambda_param_with_default_rule(Parser *p) if (p->error_indicator) { return NULL; } - NameDefaultPair* res = NULL; - int mark = p->mark; + NameDefaultPair* _res = NULL; + int _mark = p->mark; { // lambda_param default ',' + Token * _literal; arg_ty a; expr_ty c; - Token * literal; if ( (a = lambda_param_rule(p)) // lambda_param && (c = default_rule(p)) // default && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + 
(_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - res = _PyPegen_name_default_pair ( p , a , c , NULL ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_name_default_pair ( p , a , c , NULL ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // lambda_param default &':' arg_ty a; @@ -5756,18 +5756,18 @@ lambda_param_with_default_rule(Parser *p) _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' ) { - res = _PyPegen_name_default_pair ( p , a , c , NULL ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_name_default_pair ( p , a , c , NULL ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lambda_param_maybe_default: lambda_param default? ',' | lambda_param default? &':' @@ -5777,28 +5777,28 @@ lambda_param_maybe_default_rule(Parser *p) if (p->error_indicator) { return NULL; } - NameDefaultPair* res = NULL; - int mark = p->mark; + NameDefaultPair* _res = NULL; + int _mark = p->mark; { // lambda_param default? ',' + Token * _literal; arg_ty a; void *c; - Token * literal; if ( (a = lambda_param_rule(p)) // lambda_param && (c = default_rule(p), 1) // default? && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - res = _PyPegen_name_default_pair ( p , a , c , NULL ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_name_default_pair ( p , a , c , NULL ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // lambda_param default? 
&':' arg_ty a; @@ -5811,18 +5811,18 @@ lambda_param_maybe_default_rule(Parser *p) _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' ) { - res = _PyPegen_name_default_pair ( p , a , c , NULL ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_name_default_pair ( p , a , c , NULL ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lambda_param: NAME @@ -5832,42 +5832,42 @@ lambda_param_rule(Parser *p) if (p->error_indicator) { return NULL; } - arg_ty res = NULL; - int mark = p->mark; + arg_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME expr_ty a; if ( (a = _PyPegen_name_token(p)) // NAME ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_arg ( a -> v . Name . id , NULL , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_arg ( a -> v . 
Name . id , NULL , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // disjunction: conjunction (('or' conjunction))+ | conjunction @@ -5877,18 +5877,18 @@ disjunction_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, disjunction_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, disjunction_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // conjunction (('or' conjunction))+ expr_ty a; asdl_seq * b; @@ -5898,22 +5898,22 @@ disjunction_rule(Parser *p) (b = _loop1_89_rule(p)) // (('or' conjunction))+ ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BoolOp ( Or , CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA 
macro + _res = _Py_BoolOp ( Or , CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // conjunction expr_ty conjunction_var; @@ -5921,15 +5921,15 @@ disjunction_rule(Parser *p) (conjunction_var = conjunction_rule(p)) // conjunction ) { - res = conjunction_var; + _res = conjunction_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, disjunction_type, res); - return res; + _PyPegen_insert_memo(p, _mark, disjunction_type, _res); + return _res; } // conjunction: inversion (('and' inversion))+ | inversion @@ -5939,18 +5939,18 @@ conjunction_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, conjunction_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, conjunction_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // inversion (('and' inversion))+ expr_ty a; asdl_seq * b; @@ -5960,22 +5960,22 @@ conjunction_rule(Parser *p) (b = _loop1_90_rule(p)) // (('and' inversion))+ ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA 
macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BoolOp ( And , CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BoolOp ( And , CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // inversion expr_ty inversion_var; @@ -5983,15 +5983,15 @@ conjunction_rule(Parser *p) (inversion_var = inversion_rule(p)) // inversion ) { - res = inversion_var; + _res = inversion_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, conjunction_type, res); - return res; + _PyPegen_insert_memo(p, _mark, conjunction_type, _res); + return _res; } // inversion: 'not' inversion | comparison @@ -6001,43 +6001,43 @@ inversion_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, inversion_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, inversion_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'not' inversion + Token * _keyword; 
expr_ty a; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 525)) // token='not' + (_keyword = _PyPegen_expect_token(p, 525)) // token='not' && (a = inversion_rule(p)) // inversion ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_UnaryOp ( Not , a , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_UnaryOp ( Not , a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // comparison expr_ty comparison_var; @@ -6045,15 +6045,15 @@ inversion_rule(Parser *p) (comparison_var = comparison_rule(p)) // comparison ) { - res = comparison_var; + _res = comparison_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, inversion_type, res); - return res; + _PyPegen_insert_memo(p, _mark, inversion_type, _res); + return _res; } // comparison: bitwise_or compare_op_bitwise_or_pair+ | bitwise_or @@ -6063,16 +6063,16 @@ comparison_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + 
int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // bitwise_or compare_op_bitwise_or_pair+ expr_ty a; asdl_seq * b; @@ -6082,22 +6082,22 @@ comparison_rule(Parser *p) (b = _loop1_91_rule(p)) // compare_op_bitwise_or_pair+ ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Compare ( a , CHECK ( _PyPegen_get_cmpops ( p , b ) ) , CHECK ( _PyPegen_get_exprs ( p , b ) ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Compare ( a , CHECK ( _PyPegen_get_cmpops ( p , b ) ) , CHECK ( _PyPegen_get_exprs ( p , b ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // bitwise_or expr_ty bitwise_or_var; @@ -6105,14 +6105,14 @@ comparison_rule(Parser *p) (bitwise_or_var = bitwise_or_rule(p)) // bitwise_or ) { - res = bitwise_or_var; + _res = bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // compare_op_bitwise_or_pair: @@ -6132,18 +6132,18 @@ compare_op_bitwise_or_pair_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // eq_bitwise_or CmpopExprPair* eq_bitwise_or_var; if ( 
(eq_bitwise_or_var = eq_bitwise_or_rule(p)) // eq_bitwise_or ) { - res = eq_bitwise_or_var; + _res = eq_bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // noteq_bitwise_or CmpopExprPair* noteq_bitwise_or_var; @@ -6151,10 +6151,10 @@ compare_op_bitwise_or_pair_rule(Parser *p) (noteq_bitwise_or_var = noteq_bitwise_or_rule(p)) // noteq_bitwise_or ) { - res = noteq_bitwise_or_var; + _res = noteq_bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // lte_bitwise_or CmpopExprPair* lte_bitwise_or_var; @@ -6162,10 +6162,10 @@ compare_op_bitwise_or_pair_rule(Parser *p) (lte_bitwise_or_var = lte_bitwise_or_rule(p)) // lte_bitwise_or ) { - res = lte_bitwise_or_var; + _res = lte_bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // lt_bitwise_or CmpopExprPair* lt_bitwise_or_var; @@ -6173,10 +6173,10 @@ compare_op_bitwise_or_pair_rule(Parser *p) (lt_bitwise_or_var = lt_bitwise_or_rule(p)) // lt_bitwise_or ) { - res = lt_bitwise_or_var; + _res = lt_bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // gte_bitwise_or CmpopExprPair* gte_bitwise_or_var; @@ -6184,10 +6184,10 @@ compare_op_bitwise_or_pair_rule(Parser *p) (gte_bitwise_or_var = gte_bitwise_or_rule(p)) // gte_bitwise_or ) { - res = gte_bitwise_or_var; + _res = gte_bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // gt_bitwise_or CmpopExprPair* gt_bitwise_or_var; @@ -6195,10 +6195,10 @@ compare_op_bitwise_or_pair_rule(Parser *p) (gt_bitwise_or_var = gt_bitwise_or_rule(p)) // gt_bitwise_or ) { - res = gt_bitwise_or_var; + _res = gt_bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // notin_bitwise_or CmpopExprPair* notin_bitwise_or_var; @@ -6206,10 +6206,10 @@ compare_op_bitwise_or_pair_rule(Parser *p) (notin_bitwise_or_var = notin_bitwise_or_rule(p)) // notin_bitwise_or ) { - res = notin_bitwise_or_var; + _res = notin_bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // in_bitwise_or 
CmpopExprPair* in_bitwise_or_var; @@ -6217,10 +6217,10 @@ compare_op_bitwise_or_pair_rule(Parser *p) (in_bitwise_or_var = in_bitwise_or_rule(p)) // in_bitwise_or ) { - res = in_bitwise_or_var; + _res = in_bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // isnot_bitwise_or CmpopExprPair* isnot_bitwise_or_var; @@ -6228,10 +6228,10 @@ compare_op_bitwise_or_pair_rule(Parser *p) (isnot_bitwise_or_var = isnot_bitwise_or_rule(p)) // isnot_bitwise_or ) { - res = isnot_bitwise_or_var; + _res = isnot_bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // is_bitwise_or CmpopExprPair* is_bitwise_or_var; @@ -6239,14 +6239,14 @@ compare_op_bitwise_or_pair_rule(Parser *p) (is_bitwise_or_var = is_bitwise_or_rule(p)) // is_bitwise_or ) { - res = is_bitwise_or_var; + _res = is_bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // eq_bitwise_or: '==' bitwise_or @@ -6256,29 +6256,29 @@ eq_bitwise_or_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // '==' bitwise_or + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 27)) // token='==' + (_literal = _PyPegen_expect_token(p, 27)) // token='==' && (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_cmpop_expr_pair ( p , Eq , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_cmpop_expr_pair ( p , Eq , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // noteq_bitwise_or: ('!=') bitwise_or @@ -6288,8 +6288,8 @@ noteq_bitwise_or_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // 
('!=') bitwise_or void *_tmp_92_var; expr_ty a; @@ -6299,18 +6299,18 @@ noteq_bitwise_or_rule(Parser *p) (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_cmpop_expr_pair ( p , NotEq , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_cmpop_expr_pair ( p , NotEq , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lte_bitwise_or: '<=' bitwise_or @@ -6320,29 +6320,29 @@ lte_bitwise_or_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // '<=' bitwise_or + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 29)) // token='<=' + (_literal = _PyPegen_expect_token(p, 29)) // token='<=' && (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_cmpop_expr_pair ( p , LtE , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_cmpop_expr_pair ( p , LtE , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lt_bitwise_or: '<' bitwise_or @@ -6352,29 +6352,29 @@ lt_bitwise_or_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // '<' bitwise_or + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 20)) // token='<' + (_literal = _PyPegen_expect_token(p, 20)) // token='<' && (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_cmpop_expr_pair ( p , Lt , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_cmpop_expr_pair ( p , Lt , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto 
done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // gte_bitwise_or: '>=' bitwise_or @@ -6384,29 +6384,29 @@ gte_bitwise_or_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // '>=' bitwise_or + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 30)) // token='>=' + (_literal = _PyPegen_expect_token(p, 30)) // token='>=' && (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_cmpop_expr_pair ( p , GtE , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_cmpop_expr_pair ( p , GtE , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // gt_bitwise_or: '>' bitwise_or @@ -6416,29 +6416,29 @@ gt_bitwise_or_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // '>' bitwise_or + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 21)) // token='>' + (_literal = _PyPegen_expect_token(p, 21)) // token='>' && (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_cmpop_expr_pair ( p , Gt , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_cmpop_expr_pair ( p , Gt , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // notin_bitwise_or: 'not' 'in' bitwise_or @@ -6448,32 +6448,32 @@ notin_bitwise_or_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // 'not' 'in' 
bitwise_or + Token * _keyword; + Token * _keyword_1; expr_ty a; - Token * keyword; - Token * keyword_1; if ( - (keyword = _PyPegen_expect_token(p, 525)) // token='not' + (_keyword = _PyPegen_expect_token(p, 525)) // token='not' && - (keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' && (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_cmpop_expr_pair ( p , NotIn , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_cmpop_expr_pair ( p , NotIn , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // in_bitwise_or: 'in' bitwise_or @@ -6483,29 +6483,29 @@ in_bitwise_or_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // 'in' bitwise_or + Token * _keyword; expr_ty a; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 518)) // token='in' + (_keyword = _PyPegen_expect_token(p, 518)) // token='in' && (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_cmpop_expr_pair ( p , In , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_cmpop_expr_pair ( p , In , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // isnot_bitwise_or: 'is' 'not' bitwise_or @@ -6515,32 +6515,32 @@ isnot_bitwise_or_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // 'is' 'not' bitwise_or + Token * _keyword; + Token * _keyword_1; expr_ty a; - Token * keyword; - Token * keyword_1; if ( - (keyword = _PyPegen_expect_token(p, 526)) // token='is' + (_keyword = 
_PyPegen_expect_token(p, 526)) // token='is' && - (keyword_1 = _PyPegen_expect_token(p, 525)) // token='not' + (_keyword_1 = _PyPegen_expect_token(p, 525)) // token='not' && (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_cmpop_expr_pair ( p , IsNot , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_cmpop_expr_pair ( p , IsNot , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // is_bitwise_or: 'is' bitwise_or @@ -6550,29 +6550,29 @@ is_bitwise_or_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // 'is' bitwise_or + Token * _keyword; expr_ty a; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 526)) // token='is' + (_keyword = _PyPegen_expect_token(p, 526)) // token='is' && (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_cmpop_expr_pair ( p , Is , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_cmpop_expr_pair ( p , Is , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // Left-recursive @@ -6581,25 +6581,25 @@ static expr_ty bitwise_or_raw(Parser *); static expr_ty bitwise_or_rule(Parser *p) { - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, bitwise_or_type, &res)) - return res; - int mark = p->mark; - int resmark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, bitwise_or_type, &_res)) + return _res; + int _mark = p->mark; + int _resmark = p->mark; while (1) { - int tmpvar_1 = _PyPegen_update_memo(p, mark, bitwise_or_type, res); + int tmpvar_1 = _PyPegen_update_memo(p, _mark, bitwise_or_type, _res); if (tmpvar_1) { - return res; + return _res; } - p->mark = mark; - 
void *raw = bitwise_or_raw(p); - if (raw == NULL || p->mark <= resmark) + p->mark = _mark; + void *_raw = bitwise_or_raw(p); + if (_raw == NULL || p->mark <= _resmark) break; - resmark = p->mark; - res = raw; + _resmark = p->mark; + _res = _raw; } - p->mark = resmark; - return res; + p->mark = _resmark; + return _res; } static expr_ty bitwise_or_raw(Parser *p) @@ -6607,44 +6607,44 @@ bitwise_or_raw(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // bitwise_or '|' bitwise_xor + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = bitwise_or_rule(p)) // bitwise_or && - (literal = _PyPegen_expect_token(p, 18)) // token='|' + (_literal = _PyPegen_expect_token(p, 18)) // token='|' && (b = bitwise_xor_rule(p)) // bitwise_xor ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , BitOr , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used 
by EXTRA macro + _res = _Py_BinOp ( a , BitOr , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // bitwise_xor expr_ty bitwise_xor_var; @@ -6652,14 +6652,14 @@ bitwise_or_raw(Parser *p) (bitwise_xor_var = bitwise_xor_rule(p)) // bitwise_xor ) { - res = bitwise_xor_var; + _res = bitwise_xor_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // Left-recursive @@ -6668,25 +6668,25 @@ static expr_ty bitwise_xor_raw(Parser *); static expr_ty bitwise_xor_rule(Parser *p) { - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, bitwise_xor_type, &res)) - return res; - int mark = p->mark; - int resmark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, bitwise_xor_type, &_res)) + return _res; + int _mark = p->mark; + int _resmark = p->mark; while (1) { - int tmpvar_2 = _PyPegen_update_memo(p, mark, bitwise_xor_type, res); + int tmpvar_2 = _PyPegen_update_memo(p, _mark, bitwise_xor_type, _res); if (tmpvar_2) { - return res; + return _res; } - p->mark = mark; - void *raw = bitwise_xor_raw(p); - if (raw == NULL || p->mark <= resmark) + p->mark = _mark; + void *_raw = bitwise_xor_raw(p); + if (_raw == NULL || p->mark <= _resmark) break; - resmark = p->mark; - res = raw; + _resmark = p->mark; + _res = _raw; } - p->mark = resmark; - return res; + p->mark = _resmark; + return _res; } static expr_ty bitwise_xor_raw(Parser *p) @@ -6694,44 +6694,44 @@ bitwise_xor_raw(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + 
int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // bitwise_xor '^' bitwise_and + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = bitwise_xor_rule(p)) // bitwise_xor && - (literal = _PyPegen_expect_token(p, 32)) // token='^' + (_literal = _PyPegen_expect_token(p, 32)) // token='^' && (b = bitwise_and_rule(p)) // bitwise_and ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , BitXor , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , BitXor , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // bitwise_and expr_ty bitwise_and_var; @@ -6739,14 +6739,14 @@ bitwise_xor_raw(Parser *p) (bitwise_and_var = bitwise_and_rule(p)) // bitwise_and ) { - res = bitwise_and_var; + _res = bitwise_and_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // Left-recursive @@ -6755,25 +6755,25 @@ static expr_ty bitwise_and_raw(Parser *); static expr_ty bitwise_and_rule(Parser *p) { - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, bitwise_and_type, &res)) - return res; - int mark = p->mark; - int resmark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, 
bitwise_and_type, &_res)) + return _res; + int _mark = p->mark; + int _resmark = p->mark; while (1) { - int tmpvar_3 = _PyPegen_update_memo(p, mark, bitwise_and_type, res); + int tmpvar_3 = _PyPegen_update_memo(p, _mark, bitwise_and_type, _res); if (tmpvar_3) { - return res; + return _res; } - p->mark = mark; - void *raw = bitwise_and_raw(p); - if (raw == NULL || p->mark <= resmark) + p->mark = _mark; + void *_raw = bitwise_and_raw(p); + if (_raw == NULL || p->mark <= _resmark) break; - resmark = p->mark; - res = raw; + _resmark = p->mark; + _res = _raw; } - p->mark = resmark; - return res; + p->mark = _resmark; + return _res; } static expr_ty bitwise_and_raw(Parser *p) @@ -6781,44 +6781,44 @@ bitwise_and_raw(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // bitwise_and '&' shift_expr + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = bitwise_and_rule(p)) // bitwise_and && - (literal = _PyPegen_expect_token(p, 19)) // token='&' + (_literal = _PyPegen_expect_token(p, 19)) // token='&' && (b = shift_expr_rule(p)) // shift_expr ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - 
UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , BitAnd , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , BitAnd , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // shift_expr expr_ty shift_expr_var; @@ -6826,14 +6826,14 @@ bitwise_and_raw(Parser *p) (shift_expr_var = shift_expr_rule(p)) // shift_expr ) { - res = shift_expr_var; + _res = shift_expr_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // Left-recursive @@ -6842,25 +6842,25 @@ static expr_ty shift_expr_raw(Parser *); static expr_ty shift_expr_rule(Parser *p) { - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, shift_expr_type, &res)) - return res; - int mark = p->mark; - int resmark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, shift_expr_type, &_res)) + return _res; + int _mark = p->mark; + int _resmark = p->mark; while (1) { - int tmpvar_4 = _PyPegen_update_memo(p, mark, shift_expr_type, res); + int tmpvar_4 = _PyPegen_update_memo(p, _mark, shift_expr_type, _res); if (tmpvar_4) { - return res; + return _res; } - p->mark = mark; - void *raw = shift_expr_raw(p); - if (raw == NULL || p->mark <= resmark) + p->mark = _mark; + void *_raw = shift_expr_raw(p); + if (_raw == NULL || p->mark <= _resmark) break; - resmark = p->mark; - res = raw; + _resmark = p->mark; + _res = _raw; } - p->mark = resmark; - return res; + p->mark = _resmark; + return _res; } static expr_ty shift_expr_raw(Parser *p) @@ -6868,73 +6868,73 @@ shift_expr_raw(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; 
if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // shift_expr '<<' sum + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = shift_expr_rule(p)) // shift_expr && - (literal = _PyPegen_expect_token(p, 33)) // token='<<' + (_literal = _PyPegen_expect_token(p, 33)) // token='<<' && (b = sum_rule(p)) // sum ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , LShift , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , LShift , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // shift_expr '>>' sum + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = shift_expr_rule(p)) // shift_expr && - (literal = _PyPegen_expect_token(p, 34)) // token='>>' + (_literal = _PyPegen_expect_token(p, 34)) // token='>>' && (b = sum_rule(p)) // sum ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token 
*_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , RShift , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , RShift , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // sum expr_ty sum_var; @@ -6942,14 +6942,14 @@ shift_expr_raw(Parser *p) (sum_var = sum_rule(p)) // sum ) { - res = sum_var; + _res = sum_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // Left-recursive @@ -6958,25 +6958,25 @@ static expr_ty sum_raw(Parser *); static expr_ty sum_rule(Parser *p) { - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, sum_type, &res)) - return res; - int mark = p->mark; - int resmark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, sum_type, &_res)) + return _res; + int _mark = p->mark; + int _resmark = p->mark; while (1) { - int tmpvar_5 = _PyPegen_update_memo(p, mark, sum_type, res); + int tmpvar_5 = _PyPegen_update_memo(p, _mark, sum_type, _res); if (tmpvar_5) { - return res; + return _res; } - p->mark = mark; - void *raw = sum_raw(p); - if (raw == NULL || p->mark <= resmark) + p->mark = _mark; + void *_raw = sum_raw(p); + if (_raw == NULL || p->mark <= _resmark) break; - resmark = p->mark; - res = raw; + _resmark = p->mark; + _res = _raw; } - p->mark = resmark; - return res; + p->mark = _resmark; + return _res; } static expr_ty sum_raw(Parser *p) @@ -6984,73 +6984,73 @@ sum_raw(Parser *p) if (p->error_indicator) 
{ return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // sum '+' term + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = sum_rule(p)) // sum && - (literal = _PyPegen_expect_token(p, 14)) // token='+' + (_literal = _PyPegen_expect_token(p, 14)) // token='+' && (b = term_rule(p)) // term ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , Add , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , Add , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // sum '-' term + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = sum_rule(p)) // sum && - (literal = _PyPegen_expect_token(p, 15)) // token='-' + (_literal = _PyPegen_expect_token(p, 15)) // token='-' && (b = term_rule(p)) // term ) { - Token *token = 
_PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , Sub , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , Sub , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // term expr_ty term_var; @@ -7058,14 +7058,14 @@ sum_raw(Parser *p) (term_var = term_rule(p)) // term ) { - res = term_var; + _res = term_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // Left-recursive @@ -7080,25 +7080,25 @@ static expr_ty term_raw(Parser *); static expr_ty term_rule(Parser *p) { - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, term_type, &res)) - return res; - int mark = p->mark; - int resmark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, term_type, &_res)) + return _res; + int _mark = p->mark; + int _resmark = p->mark; while (1) { - int tmpvar_6 = _PyPegen_update_memo(p, mark, term_type, res); + int tmpvar_6 = _PyPegen_update_memo(p, _mark, term_type, _res); if (tmpvar_6) { - return res; + return _res; } - p->mark = mark; - void *raw = term_raw(p); - if (raw == NULL || p->mark <= resmark) + p->mark = _mark; + void *_raw = term_raw(p); + if (_raw == NULL || p->mark <= _resmark) break; - resmark = p->mark; - res = raw; + _resmark = p->mark; + _res = _raw; } - p->mark = resmark; - return res; + p->mark = _resmark; + return _res; } static expr_ty 
term_raw(Parser *p) @@ -7106,160 +7106,160 @@ term_raw(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // term '*' factor + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = term_rule(p)) // term && - (literal = _PyPegen_expect_token(p, 16)) // token='*' + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (b = factor_rule(p)) // factor ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , Mult , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , Mult , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // term '/' factor + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = term_rule(p)) // term && - (literal = _PyPegen_expect_token(p, 17)) // token='/' + (_literal = 
_PyPegen_expect_token(p, 17)) // token='/' && (b = factor_rule(p)) // factor ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , Div , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , Div , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // term '//' factor + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = term_rule(p)) // term && - (literal = _PyPegen_expect_token(p, 47)) // token='//' + (_literal = _PyPegen_expect_token(p, 47)) // token='//' && (b = factor_rule(p)) // factor ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , FloorDiv , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , FloorDiv , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + 
p->mark = _mark; } { // term '%' factor + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = term_rule(p)) // term && - (literal = _PyPegen_expect_token(p, 24)) // token='%' + (_literal = _PyPegen_expect_token(p, 24)) // token='%' && (b = factor_rule(p)) // factor ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , Mod , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , Mod , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // term '@' factor + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = term_rule(p)) // term && - (literal = _PyPegen_expect_token(p, 49)) // token='@' + (_literal = _PyPegen_expect_token(p, 49)) // token='@' && (b = factor_rule(p)) // factor ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = CHECK_VERSION ( 5 , "The '@' operator is" , _Py_BinOp ( a , MatMult , b , EXTRA ) ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset 
= _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 5 , "The '@' operator is" , _Py_BinOp ( a , MatMult , b , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // factor expr_ty factor_var; @@ -7267,14 +7267,14 @@ term_raw(Parser *p) (factor_var = factor_rule(p)) // factor ) { - res = factor_var; + _res = factor_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // factor: '+' factor | '-' factor | '~' factor | power @@ -7284,95 +7284,95 @@ factor_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, factor_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, factor_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '+' factor + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 14)) // token='+' + (_literal = _PyPegen_expect_token(p, 14)) // token='+' && (a = factor_rule(p)) // factor ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = 
token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_UnaryOp ( UAdd , a , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_UnaryOp ( UAdd , a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '-' factor + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 15)) // token='-' + (_literal = _PyPegen_expect_token(p, 15)) // token='-' && (a = factor_rule(p)) // factor ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_UnaryOp ( USub , a , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_UnaryOp ( USub , a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '~' factor + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 31)) // token='~' + (_literal = _PyPegen_expect_token(p, 31)) // token='~' && (a = factor_rule(p)) // factor ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = 
token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_UnaryOp ( Invert , a , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_UnaryOp ( Invert , a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // power expr_ty power_var; @@ -7380,15 +7380,15 @@ factor_rule(Parser *p) (power_var = power_rule(p)) // power ) { - res = power_var; + _res = power_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, factor_type, res); - return res; + _PyPegen_insert_memo(p, _mark, factor_type, _res); + return _res; } // power: await_primary '**' factor | await_primary @@ -7398,44 +7398,44 @@ power_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // await_primary '**' factor + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = await_primary_rule(p)) // await_primary && - (literal = _PyPegen_expect_token(p, 35)) // token='**' + (_literal = _PyPegen_expect_token(p, 35)) // 
token='**' && (b = factor_rule(p)) // factor ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , Pow , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , Pow , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // await_primary expr_ty await_primary_var; @@ -7443,14 +7443,14 @@ power_rule(Parser *p) (await_primary_var = await_primary_rule(p)) // await_primary ) { - res = await_primary_var; + _res = await_primary_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // await_primary: AWAIT primary | primary @@ -7460,18 +7460,18 @@ await_primary_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, await_primary_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, await_primary_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int 
_start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // AWAIT primary expr_ty a; Token * await_var; @@ -7481,22 +7481,22 @@ await_primary_rule(Parser *p) (a = primary_rule(p)) // primary ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = CHECK_VERSION ( 5 , "Await expressions are" , _Py_Await ( a , EXTRA ) ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 5 , "Await expressions are" , _Py_Await ( a , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // primary expr_ty primary_var; @@ -7504,15 +7504,15 @@ await_primary_rule(Parser *p) (primary_var = primary_rule(p)) // primary ) { - res = primary_var; + _res = primary_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, await_primary_type, res); - return res; + _PyPegen_insert_memo(p, _mark, await_primary_type, _res); + return _res; } // Left-recursive @@ -7526,25 +7526,25 @@ static expr_ty primary_raw(Parser *); static expr_ty primary_rule(Parser *p) { - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, primary_type, &res)) - return res; - int mark = p->mark; - int resmark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, primary_type, &_res)) + return _res; + int _mark = p->mark; + int _resmark = p->mark; while (1) { - int tmpvar_7 = 
_PyPegen_update_memo(p, mark, primary_type, res); + int tmpvar_7 = _PyPegen_update_memo(p, _mark, primary_type, _res); if (tmpvar_7) { - return res; + return _res; } - p->mark = mark; - void *raw = primary_raw(p); - if (raw == NULL || p->mark <= resmark) + p->mark = _mark; + void *_raw = primary_raw(p); + if (_raw == NULL || p->mark <= _resmark) break; - resmark = p->mark; - res = raw; + _resmark = p->mark; + _res = _raw; } - p->mark = resmark; - return res; + p->mark = _resmark; + return _res; } static expr_ty primary_raw(Parser *p) @@ -7552,44 +7552,44 @@ primary_raw(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // primary '.' NAME + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = primary_rule(p)) // primary && - (literal = _PyPegen_expect_token(p, 23)) // token='.' + (_literal = _PyPegen_expect_token(p, 23)) // token='.' && (b = _PyPegen_name_token(p)) // NAME ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Attribute ( a , b -> v . Name . 
id , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Attribute ( a , b -> v . Name . id , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // primary genexp expr_ty a; @@ -7600,86 +7600,86 @@ primary_raw(Parser *p) (b = genexp_rule(p)) // genexp ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Call ( a , CHECK ( _PyPegen_singleton_seq ( p , b ) ) , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Call ( a , CHECK ( _PyPegen_singleton_seq ( p , b ) ) , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // primary '(' arguments? ')' + Token * _literal; + Token * _literal_1; expr_ty a; void *b; - Token * literal; - Token * literal_1; if ( (a = primary_rule(p)) // primary && - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (b = arguments_rule(p), 1) // arguments? 
&& - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Call ( a , ( b ) ? ( ( expr_ty ) b ) -> v . Call . args : NULL , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Call ( a , ( b ) ? ( ( expr_ty ) b ) -> v . Call . args : NULL , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // primary '[' slices ']' + Token * _literal; + Token * _literal_1; expr_ty a; expr_ty b; - Token * literal; - Token * literal_1; if ( (a = primary_rule(p)) // primary && - (literal = _PyPegen_expect_token(p, 9)) // token='[' + (_literal = _PyPegen_expect_token(p, 9)) // token='[' && (b = slices_rule(p)) // slices && - (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Subscript ( a , b , Load 
, EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Subscript ( a , b , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // atom expr_ty atom_var; @@ -7687,14 +7687,14 @@ primary_raw(Parser *p) (atom_var = atom_rule(p)) // atom ) { - res = atom_var; + _res = atom_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // slices: slice !',' | ','.slice+ ','? @@ -7704,16 +7704,16 @@ slices_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // slice !',' expr_ty a; if ( @@ -7722,45 +7722,45 @@ slices_rule(Parser *p) _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 12) // token=',' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ','.slice+ ','? 
+ void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (a = _gather_93_rule(p)) // ','.slice+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Tuple ( a , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( a , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // slice: expression? ':' expression? [':' expression?] | expression @@ -7770,47 +7770,47 @@ slice_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // expression? ':' expression? 
[':' expression?] + Token * _literal; void *a; void *b; void *c; - Token * literal; if ( (a = expression_rule(p), 1) // expression? && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = expression_rule(p), 1) // expression? && (c = _tmp_95_rule(p), 1) // [':' expression?] ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Slice ( a , b , c , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Slice ( a , b , c , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // expression expr_ty a; @@ -7818,18 +7818,18 @@ slice_rule(Parser *p) (a = expression_rule(p)) // expression ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // atom: @@ -7850,110 +7850,110 @@ atom_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - 
UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME expr_ty name_var; if ( (name_var = _PyPegen_name_token(p)) // NAME ) { - res = name_var; + _res = name_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // 'True' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 527)) // token='True' + (_keyword = _PyPegen_expect_token(p, 527)) // token='True' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Constant ( Py_True , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Constant ( Py_True , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'False' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 528)) // token='False' + (_keyword = _PyPegen_expect_token(p, 528)) // token='False' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by 
EXTRA macro - res = _Py_Constant ( Py_False , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Constant ( Py_False , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'None' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 529)) // token='None' + (_keyword = _PyPegen_expect_token(p, 529)) // token='None' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Constant ( Py_None , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Constant ( Py_None , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '__new_parser__' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 530)) // token='__new_parser__' + (_keyword = _PyPegen_expect_token(p, 530)) // token='__new_parser__' ) { - res = RAISE_SYNTAX_ERROR ( "You found it!" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR ( "You found it!" 
); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // &STRING strings expr_ty strings_var; @@ -7963,10 +7963,10 @@ atom_rule(Parser *p) (strings_var = strings_rule(p)) // strings ) { - res = strings_var; + _res = strings_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // NUMBER expr_ty number_var; @@ -7974,10 +7974,10 @@ atom_rule(Parser *p) (number_var = _PyPegen_number_token(p)) // NUMBER ) { - res = number_var; + _res = number_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &'(' (tuple | group | genexp) void *_tmp_96_var; @@ -7987,10 +7987,10 @@ atom_rule(Parser *p) (_tmp_96_var = _tmp_96_rule(p)) // tuple | group | genexp ) { - res = _tmp_96_var; + _res = _tmp_96_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &'[' (list | listcomp) void *_tmp_97_var; @@ -8000,10 +8000,10 @@ atom_rule(Parser *p) (_tmp_97_var = _tmp_97_rule(p)) // list | listcomp ) { - res = _tmp_97_var; + _res = _tmp_97_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &'{' (dict | set | dictcomp | setcomp) void *_tmp_98_var; @@ -8013,37 +8013,37 @@ atom_rule(Parser *p) (_tmp_98_var = _tmp_98_rule(p)) // dict | set | dictcomp | setcomp ) { - res = _tmp_98_var; + _res = _tmp_98_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // '...' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 52)) // token='...' + (_literal = _PyPegen_expect_token(p, 52)) // token='...' 
) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Constant ( Py_Ellipsis , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Constant ( Py_Ellipsis , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // strings: STRING+ @@ -8053,29 +8053,29 @@ strings_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, strings_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, strings_type, &_res)) + return _res; + int _mark = p->mark; { // STRING+ asdl_seq * a; if ( (a = _loop1_99_rule(p)) // STRING+ ) { - res = _PyPegen_concatenate_strings ( p , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_concatenate_strings ( p , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, strings_type, res); - return res; + _PyPegen_insert_memo(p, _mark, strings_type, _res); + return _res; } // list: '[' star_named_expressions? 
']' @@ -8085,48 +8085,48 @@ list_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '[' star_named_expressions? ']' + Token * _literal; + Token * _literal_1; void *a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 9)) // token='[' + (_literal = _PyPegen_expect_token(p, 9)) // token='[' && (a = star_named_expressions_rule(p), 1) // star_named_expressions? 
&& - (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_List ( a , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_List ( a , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // listcomp: '[' named_expression for_if_clauses ']' | invalid_comprehension @@ -8136,47 +8136,47 @@ listcomp_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '[' named_expression for_if_clauses ']' + Token * _literal; + Token * _literal_1; expr_ty a; asdl_seq* b; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 9)) // token='[' + 
(_literal = _PyPegen_expect_token(p, 9)) // token='[' && (a = named_expression_rule(p)) // named_expression && (b = for_if_clauses_rule(p)) // for_if_clauses && - (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_ListComp ( a , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_ListComp ( a , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // invalid_comprehension void *invalid_comprehension_var; @@ -8184,14 +8184,14 @@ listcomp_rule(Parser *p) (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension ) { - res = invalid_comprehension_var; + _res = invalid_comprehension_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // tuple: '(' [star_named_expression ',' star_named_expressions?] 
')' @@ -8201,48 +8201,48 @@ tuple_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '(' [star_named_expression ',' star_named_expressions?] ')' + Token * _literal; + Token * _literal_1; void *a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (a = _tmp_100_rule(p), 1) // [star_named_expression ',' star_named_expressions?] 
&& - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Tuple ( a , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( a , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // group: '(' (yield_expr | named_expression) ')' @@ -8252,32 +8252,32 @@ group_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; { // '(' (yield_expr | named_expression) ')' + Token * _literal; + Token * _literal_1; void *a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (a = _tmp_101_rule(p)) // yield_expr | named_expression && - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // genexp: '(' expression for_if_clauses ')' | invalid_comprehension 
@@ -8287,47 +8287,47 @@ genexp_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '(' expression for_if_clauses ')' + Token * _literal; + Token * _literal_1; expr_ty a; asdl_seq* b; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (a = expression_rule(p)) // expression && (b = for_if_clauses_rule(p)) // for_if_clauses && - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_GeneratorExp ( a , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_GeneratorExp ( a , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { 
// invalid_comprehension void *invalid_comprehension_var; @@ -8335,14 +8335,14 @@ genexp_rule(Parser *p) (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension ) { - res = invalid_comprehension_var; + _res = invalid_comprehension_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // set: '{' expressions_list '}' @@ -8352,48 +8352,48 @@ set_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '{' expressions_list '}' + Token * _literal; + Token * _literal_1; asdl_seq* a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 25)) // token='{' + (_literal = _PyPegen_expect_token(p, 25)) // token='{' && (a = expressions_list_rule(p)) // expressions_list && - (literal_1 = _PyPegen_expect_token(p, 26)) // token='}' + (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Set ( a , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = 
_token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Set ( a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // setcomp: '{' expression for_if_clauses '}' | invalid_comprehension @@ -8403,47 +8403,47 @@ setcomp_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '{' expression for_if_clauses '}' + Token * _literal; + Token * _literal_1; expr_ty a; asdl_seq* b; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 25)) // token='{' + (_literal = _PyPegen_expect_token(p, 25)) // token='{' && (a = expression_rule(p)) // expression && (b = for_if_clauses_rule(p)) // for_if_clauses && - (literal_1 = _PyPegen_expect_token(p, 26)) // token='}' + (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // 
Only used by EXTRA macro - res = _Py_SetComp ( a , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_SetComp ( a , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // invalid_comprehension void *invalid_comprehension_var; @@ -8451,14 +8451,14 @@ setcomp_rule(Parser *p) (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension ) { - res = invalid_comprehension_var; + _res = invalid_comprehension_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // dict: '{' kvpairs? '}' @@ -8468,48 +8468,48 @@ dict_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '{' kvpairs? '}' + Token * _literal; + Token * _literal_1; void *a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 25)) // token='{' + (_literal = _PyPegen_expect_token(p, 25)) // token='{' && (a = kvpairs_rule(p), 1) // kvpairs? 
&& - (literal_1 = _PyPegen_expect_token(p, 26)) // token='}' + (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Dict ( CHECK ( _PyPegen_get_keys ( p , a ) ) , CHECK ( _PyPegen_get_values ( p , a ) ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Dict ( CHECK ( _PyPegen_get_keys ( p , a ) ) , CHECK ( _PyPegen_get_values ( p , a ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // dictcomp: '{' kvpair for_if_clauses '}' @@ -8519,51 +8519,51 @@ dictcomp_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '{' kvpair for_if_clauses '}' + Token * _literal; + Token * _literal_1; KeyValuePair* a; asdl_seq* b; - 
Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 25)) // token='{' + (_literal = _PyPegen_expect_token(p, 25)) // token='{' && (a = kvpair_rule(p)) // kvpair && (b = for_if_clauses_rule(p)) // for_if_clauses && - (literal_1 = _PyPegen_expect_token(p, 26)) // token='}' + (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_DictComp ( a -> key , a -> value , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_DictComp ( a -> key , a -> value , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // kvpairs: ','.kvpair+ ','? @@ -8573,30 +8573,30 @@ kvpairs_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // ','.kvpair+ ','? + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (a = _gather_102_rule(p)) // ','.kvpair+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // kvpair: '**' bitwise_or | expression ':' expression @@ -8606,50 +8606,50 @@ kvpair_rule(Parser *p) if (p->error_indicator) { return NULL; } - KeyValuePair* res = NULL; - int mark = p->mark; + KeyValuePair* _res = NULL; + int _mark = p->mark; { // '**' bitwise_or + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 35)) // token='**' + (_literal = _PyPegen_expect_token(p, 35)) // token='**' && (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_key_value_pair ( p , NULL , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_key_value_pair ( p , NULL , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // expression ':' expression + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = expression_rule(p)) // expression && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = expression_rule(p)) // expression ) { - res = _PyPegen_key_value_pair ( p , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_key_value_pair ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // for_if_clauses: for_if_clause+ @@ -8659,22 +8659,22 @@ for_if_clauses_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // for_if_clause+ asdl_seq * _loop1_104_var; if ( (_loop1_104_var = _loop1_104_rule(p)) // for_if_clause+ ) { - res 
= _loop1_104_var; + _res = _loop1_104_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // for_if_clause: @@ -8686,68 +8686,68 @@ for_if_clause_rule(Parser *p) if (p->error_indicator) { return NULL; } - comprehension_ty res = NULL; - int mark = p->mark; + comprehension_ty _res = NULL; + int _mark = p->mark; { // ASYNC 'for' star_targets 'in' disjunction (('if' disjunction))* + Token * _keyword; + Token * _keyword_1; expr_ty a; Token * async_var; expr_ty b; asdl_seq * c; - Token * keyword; - Token * keyword_1; if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (keyword = _PyPegen_expect_token(p, 517)) // token='for' + (_keyword = _PyPegen_expect_token(p, 517)) // token='for' && (a = star_targets_rule(p)) // star_targets && - (keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' && (b = disjunction_rule(p)) // disjunction && (c = _loop0_105_rule(p)) // (('if' disjunction))* ) { - res = CHECK_VERSION ( 6 , "Async comprehensions are" , _Py_comprehension ( a , b , c , 1 , p -> arena ) ); - if (res == NULL && PyErr_Occurred()) { + _res = CHECK_VERSION ( 6 , "Async comprehensions are" , _Py_comprehension ( a , b , c , 1 , p -> arena ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'for' star_targets 'in' disjunction (('if' disjunction))* + Token * _keyword; + Token * _keyword_1; expr_ty a; expr_ty b; asdl_seq * c; - Token * keyword; - Token * keyword_1; if ( - (keyword = _PyPegen_expect_token(p, 517)) // token='for' + (_keyword = _PyPegen_expect_token(p, 517)) // token='for' && (a = star_targets_rule(p)) // star_targets && - (keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' && (b = disjunction_rule(p)) // disjunction && (c = _loop0_106_rule(p)) // (('if' 
disjunction))* ) { - res = _Py_comprehension ( a , b , c , 0 , p -> arena ); - if (res == NULL && PyErr_Occurred()) { + _res = _Py_comprehension ( a , b , c , 0 , p -> arena ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // yield_expr: 'yield' 'from' expression | 'yield' star_expressions? @@ -8757,74 +8757,74 @@ yield_expr_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'yield' 'from' expression + Token * _keyword; + Token * _keyword_1; expr_ty a; - Token * keyword; - Token * keyword_1; if ( - (keyword = _PyPegen_expect_token(p, 504)) // token='yield' + (_keyword = _PyPegen_expect_token(p, 504)) // token='yield' && - (keyword_1 = _PyPegen_expect_token(p, 514)) // token='from' + (_keyword_1 = _PyPegen_expect_token(p, 514)) // token='from' && (a = expression_rule(p)) // expression ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_YieldFrom ( a , EXTRA ); - if (res == NULL && 
PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_YieldFrom ( a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'yield' star_expressions? + Token * _keyword; void *a; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 504)) // token='yield' + (_keyword = _PyPegen_expect_token(p, 504)) // token='yield' && (a = star_expressions_rule(p), 1) // star_expressions? ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Yield ( a , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Yield ( a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // arguments: args ','? &')' | incorrect_arguments @@ -8834,30 +8834,30 @@ arguments_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, arguments_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, arguments_type, &_res)) + return _res; + int _mark = p->mark; { // args ','? 
&')' + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings expr_ty a; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (a = args_rule(p)) // args && - (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? && _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // incorrect_arguments void *incorrect_arguments_var; @@ -8865,15 +8865,15 @@ arguments_rule(Parser *p) (incorrect_arguments_var = incorrect_arguments_rule(p)) // incorrect_arguments ) { - res = incorrect_arguments_var; + _res = incorrect_arguments_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, arguments_type, res); - return res; + _PyPegen_insert_memo(p, _mark, arguments_type, _res); + return _res; } // args: starred_expression [',' args] | kwargs | named_expression [',' args] @@ -8883,16 +8883,16 @@ args_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // starred_expression [',' args] expr_ty a; void *b; @@ -8902,22 +8902,22 @@ args_rule(Parser *p) (b = _tmp_107_rule(p), 1) // [',' args] ) { - Token *token = 
_PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Call ( _PyPegen_dummy_name ( p ) , ( b ) ? CHECK ( _PyPegen_seq_insert_in_front ( p , a , ( ( expr_ty ) b ) -> v . Call . args ) ) : CHECK ( _PyPegen_singleton_seq ( p , a ) ) , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Call ( _PyPegen_dummy_name ( p ) , ( b ) ? CHECK ( _PyPegen_seq_insert_in_front ( p , a , ( ( expr_ty ) b ) -> v . Call . args ) ) : CHECK ( _PyPegen_singleton_seq ( p , a ) ) , ( b ) ? ( ( expr_ty ) b ) -> v . Call . 
keywords : NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // kwargs asdl_seq* a; @@ -8925,22 +8925,22 @@ args_rule(Parser *p) (a = kwargs_rule(p)) // kwargs ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Call ( _PyPegen_dummy_name ( p ) , CHECK_NULL_ALLOWED ( _PyPegen_seq_extract_starred_exprs ( p , a ) ) , CHECK_NULL_ALLOWED ( _PyPegen_seq_delete_starred_exprs ( p , a ) ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Call ( _PyPegen_dummy_name ( p ) , CHECK_NULL_ALLOWED ( _PyPegen_seq_extract_starred_exprs ( p , a ) ) , CHECK_NULL_ALLOWED ( _PyPegen_seq_delete_starred_exprs ( p , a ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // named_expression [',' args] expr_ty a; @@ -8951,26 +8951,26 @@ args_rule(Parser *p) (b = _tmp_108_rule(p), 1) // [',' args] ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Call ( _PyPegen_dummy_name ( p ) , ( b ) ? 
CHECK ( _PyPegen_seq_insert_in_front ( p , a , ( ( expr_ty ) b ) -> v . Call . args ) ) : CHECK ( _PyPegen_singleton_seq ( p , a ) ) , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Call ( _PyPegen_dummy_name ( p ) , ( b ) ? CHECK ( _PyPegen_seq_insert_in_front ( p , a , ( ( expr_ty ) b ) -> v . Call . args ) ) : CHECK ( _PyPegen_singleton_seq ( p , a ) ) , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // kwargs: @@ -8983,28 +8983,28 @@ kwargs_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // ','.kwarg_or_starred+ ',' ','.kwarg_or_double_starred+ + Token * _literal; asdl_seq * a; asdl_seq * b; - Token * literal; if ( (a = _gather_109_rule(p)) // ','.kwarg_or_starred+ && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (b = _gather_111_rule(p)) // ','.kwarg_or_double_starred+ ) { - res = _PyPegen_join_sequences ( p , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_join_sequences ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ','.kwarg_or_starred+ asdl_seq * _gather_113_var; @@ -9012,10 +9012,10 @@ kwargs_rule(Parser *p) (_gather_113_var = _gather_113_rule(p)) // ','.kwarg_or_starred+ ) { - res = _gather_113_var; + _res = _gather_113_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // 
','.kwarg_or_double_starred+ asdl_seq * _gather_115_var; @@ -9023,14 +9023,14 @@ kwargs_rule(Parser *p) (_gather_115_var = _gather_115_rule(p)) // ','.kwarg_or_double_starred+ ) { - res = _gather_115_var; + _res = _gather_115_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // starred_expression: '*' expression @@ -9040,45 +9040,45 @@ starred_expression_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '*' expression + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) // token='*' + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (a = expression_rule(p)) // expression ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Starred ( a , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Starred ( 
a , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // kwarg_or_starred: NAME '=' expression | starred_expression | invalid_kwarg @@ -9088,44 +9088,44 @@ kwarg_or_starred_rule(Parser *p) if (p->error_indicator) { return NULL; } - KeywordOrStarred* res = NULL; - int mark = p->mark; + KeywordOrStarred* _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME '=' expression + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = _PyPegen_name_token(p)) // NAME && - (literal = _PyPegen_expect_token(p, 22)) // token='=' + (_literal = _PyPegen_expect_token(p, 22)) // token='=' && (b = expression_rule(p)) // expression ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _PyPegen_keyword_or_starred ( p , CHECK ( _Py_keyword ( a -> v . Name . 
id , b , EXTRA ) ) , 1 ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _PyPegen_keyword_or_starred ( p , CHECK ( _Py_keyword ( a -> v . Name . id , b , EXTRA ) ) , 1 ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // starred_expression expr_ty a; @@ -9133,14 +9133,14 @@ kwarg_or_starred_rule(Parser *p) (a = starred_expression_rule(p)) // starred_expression ) { - res = _PyPegen_keyword_or_starred ( p , a , 0 ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_keyword_or_starred ( p , a , 0 ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // invalid_kwarg void *invalid_kwarg_var; @@ -9148,14 +9148,14 @@ kwarg_or_starred_rule(Parser *p) (invalid_kwarg_var = invalid_kwarg_rule(p)) // invalid_kwarg ) { - res = invalid_kwarg_var; + _res = invalid_kwarg_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // kwarg_or_double_starred: NAME '=' expression | '**' expression | invalid_kwarg @@ -9165,70 +9165,70 @@ kwarg_or_double_starred_rule(Parser *p) if (p->error_indicator) { return NULL; } - KeywordOrStarred* res = NULL; - int mark = p->mark; + KeywordOrStarred* _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = 
p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME '=' expression + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = _PyPegen_name_token(p)) // NAME && - (literal = _PyPegen_expect_token(p, 22)) // token='=' + (_literal = _PyPegen_expect_token(p, 22)) // token='=' && (b = expression_rule(p)) // expression ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _PyPegen_keyword_or_starred ( p , CHECK ( _Py_keyword ( a -> v . Name . id , b , EXTRA ) ) , 1 ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _PyPegen_keyword_or_starred ( p , CHECK ( _Py_keyword ( a -> v . Name . 
id , b , EXTRA ) ) , 1 ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '**' expression + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 35)) // token='**' + (_literal = _PyPegen_expect_token(p, 35)) // token='**' && (a = expression_rule(p)) // expression ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _PyPegen_keyword_or_starred ( p , CHECK ( _Py_keyword ( NULL , a , EXTRA ) ) , 1 ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _PyPegen_keyword_or_starred ( p , CHECK ( _Py_keyword ( NULL , a , EXTRA ) ) , 1 ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // invalid_kwarg void *invalid_kwarg_var; @@ -9236,14 +9236,14 @@ kwarg_or_double_starred_rule(Parser *p) (invalid_kwarg_var = invalid_kwarg_rule(p)) // invalid_kwarg ) { - res = invalid_kwarg_var; + _res = invalid_kwarg_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // star_targets: star_target !',' | star_target ((',' star_target))* ','? 
@@ -9253,16 +9253,16 @@ star_targets_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // star_target !',' expr_ty a; if ( @@ -9271,48 +9271,48 @@ star_targets_rule(Parser *p) _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 12) // token=',' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // star_target ((',' star_target))* ','? + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings expr_ty a; asdl_seq * b; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (a = star_target_rule(p)) // star_target && (b = _loop0_117_rule(p)) // ((',' star_target))* && - (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Tuple ( CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // star_targets_seq: ','.star_target+ ','? @@ -9322,30 +9322,30 @@ star_targets_seq_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // ','.star_target+ ','? + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (a = _gather_118_rule(p)) // ','.star_target+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // star_target: @@ -9359,108 +9359,108 @@ star_target_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, star_target_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, star_target_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '*' (!'*' star_target) + Token * _literal; void *a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) // token='*' + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (a = _tmp_120_rule(p)) // !'*' star_target ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Starred ( CHECK ( _PyPegen_set_expr_context ( p , a , Store ) ) , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = 
_token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Starred ( CHECK ( _PyPegen_set_expr_context ( p , a , Store ) ) , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // t_primary '.' NAME !t_lookahead + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 23)) // token='.' + (_literal = _PyPegen_expect_token(p, 23)) // token='.' && (b = _PyPegen_name_token(p)) // NAME && _PyPegen_lookahead(0, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Attribute ( a , b -> v . Name . id , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Attribute ( a , b -> v . Name . 
id , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // t_primary '[' slices ']' !t_lookahead + Token * _literal; + Token * _literal_1; expr_ty a; expr_ty b; - Token * literal; - Token * literal_1; if ( (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 9)) // token='[' + (_literal = _PyPegen_expect_token(p, 9)) // token='[' && (b = slices_rule(p)) // slices && - (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' && _PyPegen_lookahead(0, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Subscript ( a , b , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Subscript ( a , b , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // star_atom expr_ty star_atom_var; @@ -9468,15 +9468,15 @@ star_target_rule(Parser *p) (star_atom_var = star_atom_rule(p)) // star_atom ) { - res = star_atom_var; + _res = star_atom_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, star_target_type, res); - return res; + _PyPegen_insert_memo(p, _mark, star_target_type, _res); + return _res; } // star_atom: @@ -9490,113 +9490,113 @@ star_atom_rule(Parser *p) if 
(p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME expr_ty a; if ( (a = _PyPegen_name_token(p)) // NAME ) { - res = _PyPegen_set_expr_context ( p , a , Store ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_set_expr_context ( p , a , Store ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '(' star_target ')' + Token * _literal; + Token * _literal_1; expr_ty a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (a = star_target_rule(p)) // star_target && - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - res = _PyPegen_set_expr_context ( p , a , Store ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_set_expr_context ( p , a , Store ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '(' star_targets_seq? ')' + Token * _literal; + Token * _literal_1; void *a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (a = star_targets_seq_rule(p), 1) // star_targets_seq? 
&& - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Tuple ( a , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( a , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '[' star_targets_seq? ']' + Token * _literal; + Token * _literal_1; void *a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 9)) // token='[' + (_literal = _PyPegen_expect_token(p, 9)) // token='[' && (a = star_targets_seq_rule(p), 1) // star_targets_seq? 
&& - (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_List ( a , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_List ( a , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // inside_paren_ann_assign_target: @@ -9609,18 +9609,18 @@ inside_paren_ann_assign_target_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; { // ann_assign_subscript_attribute_target expr_ty ann_assign_subscript_attribute_target_var; if ( (ann_assign_subscript_attribute_target_var = ann_assign_subscript_attribute_target_rule(p)) // ann_assign_subscript_attribute_target ) { - res = ann_assign_subscript_attribute_target_var; + _res = ann_assign_subscript_attribute_target_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // NAME expr_ty a; @@ -9628,39 +9628,39 @@ inside_paren_ann_assign_target_rule(Parser *p) (a = _PyPegen_name_token(p)) // NAME ) { - res = _PyPegen_set_expr_context ( p , a , Store ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_set_expr_context ( p , a , Store ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } 
goto done; } - p->mark = mark; + p->mark = _mark; } { // '(' inside_paren_ann_assign_target ')' + Token * _literal; + Token * _literal_1; expr_ty a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (a = inside_paren_ann_assign_target_rule(p)) // inside_paren_ann_assign_target && - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // ann_assign_subscript_attribute_target: @@ -9672,84 +9672,84 @@ ann_assign_subscript_attribute_target_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // t_primary '.' NAME !t_lookahead + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 23)) // token='.' + (_literal = _PyPegen_expect_token(p, 23)) // token='.' 
&& (b = _PyPegen_name_token(p)) // NAME && _PyPegen_lookahead(0, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Attribute ( a , b -> v . Name . id , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Attribute ( a , b -> v . Name . id , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // t_primary '[' slices ']' !t_lookahead + Token * _literal; + Token * _literal_1; expr_ty a; expr_ty b; - Token * literal; - Token * literal_1; if ( (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 9)) // token='[' + (_literal = _PyPegen_expect_token(p, 9)) // token='[' && (b = slices_rule(p)) // slices && - (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' && _PyPegen_lookahead(0, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Subscript ( a , b , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // 
Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Subscript ( a , b , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // del_targets: ','.del_target+ ','? @@ -9759,30 +9759,30 @@ del_targets_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // ','.del_target+ ','? + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (a = _gather_121_rule(p)) // ','.del_target+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // del_target: @@ -9795,82 +9795,82 @@ del_target_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, del_target_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, del_target_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + 
UNUSED(_start_col_offset); // Only used by EXTRA macro { // t_primary '.' NAME !t_lookahead + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 23)) // token='.' + (_literal = _PyPegen_expect_token(p, 23)) // token='.' && (b = _PyPegen_name_token(p)) // NAME && _PyPegen_lookahead(0, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Attribute ( a , b -> v . Name . id , Del , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Attribute ( a , b -> v . Name . 
id , Del , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // t_primary '[' slices ']' !t_lookahead + Token * _literal; + Token * _literal_1; expr_ty a; expr_ty b; - Token * literal; - Token * literal_1; if ( (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 9)) // token='[' + (_literal = _PyPegen_expect_token(p, 9)) // token='[' && (b = slices_rule(p)) // slices && - (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' && _PyPegen_lookahead(0, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Subscript ( a , b , Del , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Subscript ( a , b , Del , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // del_t_atom expr_ty del_t_atom_var; @@ -9878,15 +9878,15 @@ del_target_rule(Parser *p) (del_t_atom_var = del_t_atom_rule(p)) // del_t_atom ) { - res = del_t_atom_var; + _res = del_t_atom_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, del_target_type, res); - return res; + _PyPegen_insert_memo(p, _mark, del_target_type, _res); + return _res; } // del_t_atom: NAME | '(' del_target ')' | '(' del_targets? 
')' | '[' del_targets? ']' @@ -9896,113 +9896,113 @@ del_t_atom_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME expr_ty a; if ( (a = _PyPegen_name_token(p)) // NAME ) { - res = _PyPegen_set_expr_context ( p , a , Del ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_set_expr_context ( p , a , Del ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '(' del_target ')' + Token * _literal; + Token * _literal_1; expr_ty a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (a = del_target_rule(p)) // del_target && - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - res = _PyPegen_set_expr_context ( p , a , Del ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_set_expr_context ( p , a , Del ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '(' del_targets? 
')' + Token * _literal; + Token * _literal_1; void *a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (a = del_targets_rule(p), 1) // del_targets? && - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Tuple ( a , Del , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( a , Del , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '[' del_targets? ']' + Token * _literal; + Token * _literal_1; void *a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 9)) // token='[' + (_literal = _PyPegen_expect_token(p, 9)) // token='[' && (a = del_targets_rule(p), 1) // del_targets? 
&& - (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_List ( a , Del , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_List ( a , Del , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // targets: ','.target+ ','? @@ -10012,30 +10012,30 @@ targets_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // ','.target+ ','? + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (a = _gather_123_rule(p)) // ','.target+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // target: @@ -10048,82 +10048,82 @@ target_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, target_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, target_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // t_primary '.' NAME !t_lookahead + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 23)) // token='.' + (_literal = _PyPegen_expect_token(p, 23)) // token='.' && (b = _PyPegen_name_token(p)) // NAME && _PyPegen_lookahead(0, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Attribute ( a , b -> v . Name . 
id , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Attribute ( a , b -> v . Name . id , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // t_primary '[' slices ']' !t_lookahead + Token * _literal; + Token * _literal_1; expr_ty a; expr_ty b; - Token * literal; - Token * literal_1; if ( (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 9)) // token='[' + (_literal = _PyPegen_expect_token(p, 9)) // token='[' && (b = slices_rule(p)) // slices && - (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' && _PyPegen_lookahead(0, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Subscript ( a , b , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Subscript ( a , b , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // t_atom expr_ty t_atom_var; @@ -10131,15 +10131,15 @@ target_rule(Parser *p) (t_atom_var = t_atom_rule(p)) // t_atom ) { - res = t_atom_var; + _res = t_atom_var; goto done; } - p->mark = mark; 
+ p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, target_type, res); - return res; + _PyPegen_insert_memo(p, _mark, target_type, _res); + return _res; } // Left-recursive @@ -10153,25 +10153,25 @@ static expr_ty t_primary_raw(Parser *); static expr_ty t_primary_rule(Parser *p) { - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, t_primary_type, &res)) - return res; - int mark = p->mark; - int resmark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, t_primary_type, &_res)) + return _res; + int _mark = p->mark; + int _resmark = p->mark; while (1) { - int tmpvar_8 = _PyPegen_update_memo(p, mark, t_primary_type, res); + int tmpvar_8 = _PyPegen_update_memo(p, _mark, t_primary_type, _res); if (tmpvar_8) { - return res; + return _res; } - p->mark = mark; - void *raw = t_primary_raw(p); - if (raw == NULL || p->mark <= resmark) + p->mark = _mark; + void *_raw = t_primary_raw(p); + if (_raw == NULL || p->mark <= _resmark) break; - resmark = p->mark; - res = raw; + _resmark = p->mark; + _res = _raw; } - p->mark = resmark; - return res; + p->mark = _resmark; + return _res; } static expr_ty t_primary_raw(Parser *p) @@ -10179,80 +10179,80 @@ t_primary_raw(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // t_primary '.' 
NAME &t_lookahead + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 23)) // token='.' + (_literal = _PyPegen_expect_token(p, 23)) // token='.' && (b = _PyPegen_name_token(p)) // NAME && _PyPegen_lookahead(1, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Attribute ( a , b -> v . Name . id , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Attribute ( a , b -> v . Name . 
id , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // t_primary '[' slices ']' &t_lookahead + Token * _literal; + Token * _literal_1; expr_ty a; expr_ty b; - Token * literal; - Token * literal_1; if ( (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 9)) // token='[' + (_literal = _PyPegen_expect_token(p, 9)) // token='[' && (b = slices_rule(p)) // slices && - (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' && _PyPegen_lookahead(1, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Subscript ( a , b , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Subscript ( a , b , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // t_primary genexp &t_lookahead expr_ty a; @@ -10265,56 +10265,56 @@ t_primary_raw(Parser *p) _PyPegen_lookahead(1, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by 
EXTRA macro - res = _Py_Call ( a , CHECK ( _PyPegen_singleton_seq ( p , b ) ) , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Call ( a , CHECK ( _PyPegen_singleton_seq ( p , b ) ) , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // t_primary '(' arguments? ')' &t_lookahead + Token * _literal; + Token * _literal_1; expr_ty a; void *b; - Token * literal; - Token * literal_1; if ( (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (b = arguments_rule(p), 1) // arguments? && - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && _PyPegen_lookahead(1, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Call ( a , ( b ) ? ( ( expr_ty ) b ) -> v . Call . args : NULL , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Call ( a , ( b ) ? ( ( expr_ty ) b ) -> v . Call . args : NULL , ( b ) ? ( ( expr_ty ) b ) -> v . Call . 
keywords : NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // atom &t_lookahead expr_ty a; @@ -10324,18 +10324,18 @@ t_primary_raw(Parser *p) _PyPegen_lookahead(1, t_lookahead_rule, p) ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // t_lookahead: '(' | '[' | '.' @@ -10345,44 +10345,44 @@ t_lookahead_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '(' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // '[' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 9)) // token='[' + (_literal = _PyPegen_expect_token(p, 9)) // token='[' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // '.' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 23)) // token='.' + (_literal = _PyPegen_expect_token(p, 23)) // token='.' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // t_atom: NAME | '(' target ')' | '(' targets? ')' | '[' targets? 
']' @@ -10392,113 +10392,113 @@ t_atom_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME expr_ty a; if ( (a = _PyPegen_name_token(p)) // NAME ) { - res = _PyPegen_set_expr_context ( p , a , Store ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_set_expr_context ( p , a , Store ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '(' target ')' + Token * _literal; + Token * _literal_1; expr_ty a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (a = target_rule(p)) // target && - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - res = _PyPegen_set_expr_context ( p , a , Store ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_set_expr_context ( p , a , Store ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '(' targets? ')' + Token * _literal; + Token * _literal_1; void *b; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (b = targets_rule(p), 1) // targets? 
&& - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Tuple ( b , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( b , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '[' targets? ']' + Token * _literal; + Token * _literal_1; void *b; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 9)) // token='[' + (_literal = _PyPegen_expect_token(p, 9)) // token='[' && (b = targets_rule(p), 1) // targets? 
&& - (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_List ( b , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_List ( b , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // incorrect_arguments: @@ -10511,78 +10511,78 @@ incorrect_arguments_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // args ',' '*' + Token * _literal; + Token * _literal_1; expr_ty args_var; - Token * literal; - Token * literal_1; if ( (args_var = args_rule(p)) // args && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (literal_1 = _PyPegen_expect_token(p, 16)) // token='*' + (_literal_1 = _PyPegen_expect_token(p, 16)) // token='*' ) { - res = RAISE_SYNTAX_ERROR ( "iterable argument unpacking follows keyword argument unpacking" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR ( "iterable argument unpacking follows keyword argument unpacking" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 
expression for_if_clauses ',' [args | expression for_if_clauses] + Token * _literal; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings expr_ty expression_var; asdl_seq* for_if_clauses_var; - Token * literal; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (expression_var = expression_rule(p)) // expression && (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (opt_var = _tmp_125_rule(p), 1) // [args | expression for_if_clauses] + (_opt_var = _tmp_125_rule(p), 1) // [args | expression for_if_clauses] ) { - res = RAISE_SYNTAX_ERROR ( "Generator expression must be parenthesized" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR ( "Generator expression must be parenthesized" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // args ',' args + Token * _literal; expr_ty a; expr_ty args_var; - Token * literal; if ( (a = args_rule(p)) // args && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (args_var = args_rule(p)) // args ) { - res = _PyPegen_arguments_parsing_error ( p , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_arguments_parsing_error ( p , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // invalid_kwarg: expression '=' @@ -10592,29 +10592,29 @@ invalid_kwarg_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // expression '=' + Token * _literal; expr_ty expression_var; - Token * literal; if ( (expression_var = expression_rule(p)) // expression && - 
(literal = _PyPegen_expect_token(p, 22)) // token='=' + (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - res = RAISE_SYNTAX_ERROR ( "expression cannot contain assignment, perhaps you meant \"==\"?" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR ( "expression cannot contain assignment, perhaps you meant \"==\"?" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // invalid_named_expression: expression ':=' expression @@ -10624,32 +10624,32 @@ invalid_named_expression_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // expression ':=' expression + Token * _literal; expr_ty a; expr_ty expression_var; - Token * literal; if ( (a = expression_rule(p)) // expression && - (literal = _PyPegen_expect_token(p, 53)) // token=':=' + (_literal = _PyPegen_expect_token(p, 53)) // token=':=' && (expression_var = expression_rule(p)) // expression ) { - res = RAISE_SYNTAX_ERROR ( "cannot use assignment expressions with %s" , _PyPegen_get_expr_name ( a ) ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR ( "cannot use assignment expressions with %s" , _PyPegen_get_expr_name ( a ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // invalid_assignment: @@ -10663,68 +10663,68 @@ invalid_assignment_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // list ':' + Token * _literal; expr_ty list_var; - Token * literal; if ( (list_var = list_rule(p)) // list && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = 
_PyPegen_expect_token(p, 11)) // token=':' ) { - res = RAISE_SYNTAX_ERROR ( "only single target (not list) can be annotated" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR ( "only single target (not list) can be annotated" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // tuple ':' - Token * literal; + Token * _literal; expr_ty tuple_var; if ( (tuple_var = tuple_rule(p)) // tuple && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - res = RAISE_SYNTAX_ERROR ( "only single target (not tuple) can be annotated" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR ( "only single target (not tuple) can be annotated" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // expression ':' expression ['=' annotated_rhs] + Token * _literal; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings expr_ty expression_var; expr_ty expression_var_1; - Token * literal; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (expression_var = expression_rule(p)) // expression && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (expression_var_1 = expression_rule(p)) // expression && - (opt_var = _tmp_126_rule(p), 1) // ['=' annotated_rhs] + (_opt_var = _tmp_126_rule(p), 1) // ['=' annotated_rhs] ) { - res = RAISE_SYNTAX_ERROR ( "illegal target for annotation" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR ( "illegal target for annotation" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // expression ('=' | augassign) (yield_expr | star_expressions) void *_tmp_127_var; @@ -10738,18 +10738,18 @@ 
invalid_assignment_rule(Parser *p) (_tmp_128_var = _tmp_128_rule(p)) // yield_expr | star_expressions ) { - res = RAISE_SYNTAX_ERROR_NO_COL_OFFSET ( "cannot assign to %s" , _PyPegen_get_expr_name ( a ) ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR_NO_COL_OFFSET ( "cannot assign to %s" , _PyPegen_get_expr_name ( a ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // invalid_block: NEWLINE !INDENT @@ -10759,8 +10759,8 @@ invalid_block_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // NEWLINE !INDENT Token * newline_var; if ( @@ -10769,18 +10769,18 @@ invalid_block_rule(Parser *p) _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, INDENT) // token=INDENT ) { - res = RAISE_INDENTATION_ERROR ( "expected an indented block" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_INDENTATION_ERROR ( "expected an indented block" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // invalid_comprehension: ('[' | '(' | '{') '*' expression for_if_clauses @@ -10790,35 +10790,35 @@ invalid_comprehension_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ('[' | '(' | '{') '*' expression for_if_clauses + Token * _literal; void *_tmp_129_var; expr_ty expression_var; asdl_seq* for_if_clauses_var; - Token * literal; if ( (_tmp_129_var = _tmp_129_rule(p)) // '[' | '(' | '{' && - (literal = _PyPegen_expect_token(p, 16)) // token='*' + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (expression_var = expression_rule(p)) // expression && 
(for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses ) { - res = RAISE_SYNTAX_ERROR ( "iterable unpacking cannot be used in comprehension" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR ( "iterable unpacking cannot be used in comprehension" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // invalid_parameters: @@ -10829,8 +10829,8 @@ invalid_parameters_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // param_no_default* (slash_with_default | param_with_default+) param_no_default asdl_seq * _loop0_130_var; void *_tmp_131_var; @@ -10843,18 +10843,18 @@ invalid_parameters_rule(Parser *p) (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - res = RAISE_SYNTAX_ERROR ( "non-default argument follows default argument" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR ( "non-default argument follows default argument" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // invalid_star_etc: '*' (')' | ',' (')' | '**')) @@ -10864,29 +10864,29 @@ invalid_star_etc_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '*' (')' | ',' (')' | '**')) + Token * _literal; void *_tmp_132_var; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) // token='*' + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (_tmp_132_var = _tmp_132_rule(p)) // ')' | ',' (')' | '**') ) { - res = RAISE_SYNTAX_ERROR ( "named arguments must follow bare *" ); - if (res == NULL && PyErr_Occurred()) { + _res = 
RAISE_SYNTAX_ERROR ( "named arguments must follow bare *" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // invalid_lambda_star_etc: '*' (':' | ',' (':' | '**')) @@ -10896,29 +10896,29 @@ invalid_lambda_star_etc_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '*' (':' | ',' (':' | '**')) + Token * _literal; void *_tmp_133_var; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) // token='*' + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (_tmp_133_var = _tmp_133_rule(p)) // ':' | ',' (':' | '**') ) { - res = RAISE_SYNTAX_ERROR ( "named arguments must follow bare *" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR ( "named arguments must follow bare *" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // invalid_double_type_comments: TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT @@ -10928,8 +10928,8 @@ invalid_double_type_comments_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT Token * indent_var; Token * newline_var; @@ -10948,18 +10948,18 @@ invalid_double_type_comments_rule(Parser *p) (indent_var = _PyPegen_expect_token(p, INDENT)) // token='INDENT' ) { - res = RAISE_SYNTAX_ERROR ( "Cannot have two type comments on def" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR ( "Cannot have two type comments on def" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; 
} - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_1: NEWLINE @@ -10969,46 +10969,46 @@ _loop0_1_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // NEWLINE Token * newline_var; while ( (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - res = newline_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = newline_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_1"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_1_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_1_type, _seq); + return _seq; } // _loop0_2: NEWLINE @@ -11018,46 +11018,46 @@ _loop0_2_rule(Parser *p) if (p->error_indicator) { return NULL; } - void 
*res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // NEWLINE Token * newline_var; while ( (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - res = newline_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = newline_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_2"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_2_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_2_type, _seq); + return _seq; } // _loop0_4: ',' expression @@ -11067,54 +11067,54 @@ _loop0_4_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int 
_start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' expression + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = expression_rule(p)) // expression ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_4"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_4_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_4_type, _seq); + return _seq; } // _gather_3: expression _loop0_4 @@ -11124,8 +11124,8 @@ _gather_3_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int 
_mark = p->mark; { // expression _loop0_4 expr_ty elem; asdl_seq * seq; @@ -11135,14 +11135,14 @@ _gather_3_rule(Parser *p) (seq = _loop0_4_rule(p)) // _loop0_4 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_6: ',' expression @@ -11152,54 +11152,54 @@ _loop0_6_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' expression + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = expression_rule(p)) // expression ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = 
_Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_6"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_6_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_6_type, _seq); + return _seq; } // _gather_5: expression _loop0_6 @@ -11209,8 +11209,8 @@ _gather_5_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // expression _loop0_6 expr_ty elem; asdl_seq * seq; @@ -11220,14 +11220,14 @@ _gather_5_rule(Parser *p) (seq = _loop0_6_rule(p)) // _loop0_6 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_8: ',' expression @@ -11237,54 +11237,54 @@ _loop0_8_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' expression + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = expression_rule(p)) // expression ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == 
NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_8"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_8_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_8_type, _seq); + return _seq; } // _gather_7: expression _loop0_8 @@ -11294,8 +11294,8 @@ _gather_7_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // expression _loop0_8 expr_ty elem; asdl_seq * seq; @@ -11305,14 +11305,14 @@ _gather_7_rule(Parser *p) (seq = _loop0_8_rule(p)) // _loop0_8 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_10: ',' expression @@ -11322,54 +11322,54 @@ _loop0_10_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children 
= PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' expression + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = expression_rule(p)) // expression ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_10"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_10_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_10_type, _seq); + return _seq; } // _gather_9: expression _loop0_10 @@ -11379,8 +11379,8 @@ _gather_9_rule(Parser *p) if 
(p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // expression _loop0_10 expr_ty elem; asdl_seq * seq; @@ -11390,14 +11390,14 @@ _gather_9_rule(Parser *p) (seq = _loop0_10_rule(p)) // _loop0_10 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop1_11: statement @@ -11407,50 +11407,50 @@ _loop1_11_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // statement asdl_seq* statement_var; while ( (statement_var = statement_rule(p)) // statement ) { - res = statement_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = statement_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { 
PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_11"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_11_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_11_type, _seq); + return _seq; } // _loop0_13: ';' small_stmt @@ -11460,54 +11460,54 @@ _loop0_13_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ';' small_stmt + Token * _literal; stmt_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 13)) // token=';' + (_literal = _PyPegen_expect_token(p, 13)) // token=';' && (elem = small_stmt_rule(p)) // small_stmt ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = 
_Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_13"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_13_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_13_type, _seq); + return _seq; } // _gather_12: small_stmt _loop0_13 @@ -11517,8 +11517,8 @@ _gather_12_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // small_stmt _loop0_13 stmt_ty elem; asdl_seq * seq; @@ -11528,14 +11528,14 @@ _gather_12_rule(Parser *p) (seq = _loop0_13_rule(p)) // _loop0_13 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_14: 'import' | 'from' @@ -11545,33 +11545,33 @@ _tmp_14_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'import' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 513)) // token='import' + (_keyword = _PyPegen_expect_token(p, 513)) // token='import' ) { - res = keyword; + _res = _keyword; goto done; } - p->mark = mark; + p->mark = _mark; } { // 'from' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 514)) // token='from' + (_keyword = _PyPegen_expect_token(p, 514)) // token='from' ) { - res = keyword; + _res = _keyword; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_15: 
'def' | '@' | ASYNC @@ -11581,29 +11581,29 @@ _tmp_15_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'def' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 522)) // token='def' + (_keyword = _PyPegen_expect_token(p, 522)) // token='def' ) { - res = keyword; + _res = _keyword; goto done; } - p->mark = mark; + p->mark = _mark; } { // '@' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 49)) // token='@' + (_literal = _PyPegen_expect_token(p, 49)) // token='@' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // ASYNC Token * async_var; @@ -11611,14 +11611,14 @@ _tmp_15_rule(Parser *p) (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' ) { - res = async_var; + _res = async_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_16: 'class' | '@' @@ -11628,33 +11628,33 @@ _tmp_16_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'class' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 523)) // token='class' + (_keyword = _PyPegen_expect_token(p, 523)) // token='class' ) { - res = keyword; + _res = _keyword; goto done; } - p->mark = mark; + p->mark = _mark; } { // '@' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 49)) // token='@' + (_literal = _PyPegen_expect_token(p, 49)) // token='@' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_17: 'with' | ASYNC @@ -11664,18 +11664,18 @@ _tmp_17_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * 
_res = NULL; + int _mark = p->mark; { // 'with' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 519)) // token='with' + (_keyword = _PyPegen_expect_token(p, 519)) // token='with' ) { - res = keyword; + _res = _keyword; goto done; } - p->mark = mark; + p->mark = _mark; } { // ASYNC Token * async_var; @@ -11683,14 +11683,14 @@ _tmp_17_rule(Parser *p) (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' ) { - res = async_var; + _res = async_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_18: 'for' | ASYNC @@ -11700,18 +11700,18 @@ _tmp_18_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'for' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 517)) // token='for' + (_keyword = _PyPegen_expect_token(p, 517)) // token='for' ) { - res = keyword; + _res = _keyword; goto done; } - p->mark = mark; + p->mark = _mark; } { // ASYNC Token * async_var; @@ -11719,14 +11719,14 @@ _tmp_18_rule(Parser *p) (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' ) { - res = async_var; + _res = async_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_19: '=' annotated_rhs @@ -11736,29 +11736,29 @@ _tmp_19_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '=' annotated_rhs + Token * _literal; expr_ty d; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 22)) // token='=' + (_literal = _PyPegen_expect_token(p, 22)) // token='=' && (d = annotated_rhs_rule(p)) // annotated_rhs ) { - res = d; - if (res == NULL && PyErr_Occurred()) { + _res = d; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - 
p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_20: '(' inside_paren_ann_assign_target ')' | ann_assign_subscript_attribute_target @@ -11768,28 +11768,28 @@ _tmp_20_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '(' inside_paren_ann_assign_target ')' + Token * _literal; + Token * _literal_1; expr_ty b; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (b = inside_paren_ann_assign_target_rule(p)) // inside_paren_ann_assign_target && - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - res = b; - if (res == NULL && PyErr_Occurred()) { + _res = b; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ann_assign_subscript_attribute_target expr_ty ann_assign_subscript_attribute_target_var; @@ -11797,14 +11797,14 @@ _tmp_20_rule(Parser *p) (ann_assign_subscript_attribute_target_var = ann_assign_subscript_attribute_target_rule(p)) // ann_assign_subscript_attribute_target ) { - res = ann_assign_subscript_attribute_target_var; + _res = ann_assign_subscript_attribute_target_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_21: '=' annotated_rhs @@ -11814,29 +11814,29 @@ _tmp_21_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '=' annotated_rhs + Token * _literal; expr_ty d; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 22)) // token='=' + (_literal = _PyPegen_expect_token(p, 22)) // token='=' && (d = annotated_rhs_rule(p)) // annotated_rhs ) { - res = d; - if 
(res == NULL && PyErr_Occurred()) { + _res = d; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop1_22: (star_targets '=') @@ -11846,50 +11846,50 @@ _loop1_22_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // (star_targets '=') void *_tmp_134_var; while ( (_tmp_134_var = _tmp_134_rule(p)) // star_targets '=' ) { - res = _tmp_134_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = _tmp_134_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_22"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, 
start_mark, _loop1_22_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_22_type, _seq); + return _seq; } // _tmp_23: yield_expr | star_expressions @@ -11899,18 +11899,18 @@ _tmp_23_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // yield_expr expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - res = yield_expr_var; + _res = yield_expr_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // star_expressions expr_ty star_expressions_var; @@ -11918,14 +11918,14 @@ _tmp_23_rule(Parser *p) (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - res = star_expressions_var; + _res = star_expressions_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_24: yield_expr | star_expressions @@ -11935,18 +11935,18 @@ _tmp_24_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // yield_expr expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - res = yield_expr_var; + _res = yield_expr_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // star_expressions expr_ty star_expressions_var; @@ -11954,14 +11954,14 @@ _tmp_24_rule(Parser *p) (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - res = star_expressions_var; + _res = star_expressions_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_26: ',' NAME @@ -11971,54 +11971,54 @@ _loop0_26_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = 
PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' NAME + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = _PyPegen_name_token(p)) // NAME ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_26"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_26_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_26_type, _seq); + return _seq; } // _gather_25: NAME _loop0_26 @@ -12028,8 +12028,8 @@ _gather_25_rule(Parser *p) if (p->error_indicator) { 
return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // NAME _loop0_26 expr_ty elem; asdl_seq * seq; @@ -12039,14 +12039,14 @@ _gather_25_rule(Parser *p) (seq = _loop0_26_rule(p)) // _loop0_26 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_28: ',' NAME @@ -12056,54 +12056,54 @@ _loop0_28_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' NAME + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = _PyPegen_name_token(p)) // NAME ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq 
*seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_28"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_28_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_28_type, _seq); + return _seq; } // _gather_27: NAME _loop0_28 @@ -12113,8 +12113,8 @@ _gather_27_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // NAME _loop0_28 expr_ty elem; asdl_seq * seq; @@ -12124,14 +12124,14 @@ _gather_27_rule(Parser *p) (seq = _loop0_28_rule(p)) // _loop0_28 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_29: ',' expression @@ -12141,29 +12141,29 @@ _tmp_29_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ',' expression - Token * literal; + Token * _literal; expr_ty z; if ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (z = expression_rule(p)) // expression ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_30: ('.' 
| '...') @@ -12173,46 +12173,46 @@ _loop0_30_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ('.' | '...') void *_tmp_135_var; while ( (_tmp_135_var = _tmp_135_rule(p)) // '.' | '...' ) { - res = _tmp_135_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = _tmp_135_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_30"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_30_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_30_type, _seq); + return _seq; } // _loop1_31: ('.' 
| '...') @@ -12222,50 +12222,50 @@ _loop1_31_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ('.' | '...') void *_tmp_136_var; while ( (_tmp_136_var = _tmp_136_rule(p)) // '.' | '...' ) { - res = _tmp_136_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = _tmp_136_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_31"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_31_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_31_type, _seq); + return _seq; } // _loop0_33: ',' import_from_as_name @@ -12275,54 +12275,54 @@ 
_loop0_33_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' import_from_as_name + Token * _literal; alias_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = import_from_as_name_rule(p)) // import_from_as_name ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_33"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_33_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + 
PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_33_type, _seq); + return _seq; } // _gather_32: import_from_as_name _loop0_33 @@ -12332,8 +12332,8 @@ _gather_32_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // import_from_as_name _loop0_33 alias_ty elem; asdl_seq * seq; @@ -12343,14 +12343,14 @@ _gather_32_rule(Parser *p) (seq = _loop0_33_rule(p)) // _loop0_33 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_34: 'as' NAME @@ -12360,29 +12360,29 @@ _tmp_34_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'as' NAME - Token * keyword; + Token * _keyword; expr_ty z; if ( - (keyword = _PyPegen_expect_token(p, 531)) // token='as' + (_keyword = _PyPegen_expect_token(p, 531)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_36: ',' dotted_as_name @@ -12392,54 +12392,54 @@ _loop0_36_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { 
// ',' dotted_as_name + Token * _literal; alias_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = dotted_as_name_rule(p)) // dotted_as_name ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_36"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_36_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_36_type, _seq); + return _seq; } // _gather_35: dotted_as_name _loop0_36 @@ -12449,8 +12449,8 @@ _gather_35_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // dotted_as_name _loop0_36 alias_ty elem; asdl_seq * seq; @@ -12460,14 +12460,14 @@ _gather_35_rule(Parser *p) (seq = _loop0_36_rule(p)) // _loop0_36 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = 
_PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_37: 'as' NAME @@ -12477,29 +12477,29 @@ _tmp_37_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'as' NAME - Token * keyword; + Token * _keyword; expr_ty z; if ( - (keyword = _PyPegen_expect_token(p, 531)) // token='as' + (_keyword = _PyPegen_expect_token(p, 531)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_39: ',' with_item @@ -12509,54 +12509,54 @@ _loop0_39_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' with_item + Token * _literal; withitem_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = with_item_rule(p)) // with_item ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, 
children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_39"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_39_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_39_type, _seq); + return _seq; } // _gather_38: with_item _loop0_39 @@ -12566,8 +12566,8 @@ _gather_38_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // with_item _loop0_39 withitem_ty elem; asdl_seq * seq; @@ -12577,14 +12577,14 @@ _gather_38_rule(Parser *p) (seq = _loop0_39_rule(p)) // _loop0_39 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_41: ',' with_item @@ -12594,54 +12594,54 @@ _loop0_41_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { 
PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' with_item + Token * _literal; withitem_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = with_item_rule(p)) // with_item ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_41"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_41_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_41_type, _seq); + return _seq; } // _gather_40: with_item _loop0_41 @@ -12651,8 +12651,8 @@ _gather_40_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // with_item _loop0_41 withitem_ty elem; asdl_seq * seq; @@ -12662,14 
+12662,14 @@ _gather_40_rule(Parser *p) (seq = _loop0_41_rule(p)) // _loop0_41 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_43: ',' with_item @@ -12679,54 +12679,54 @@ _loop0_43_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' with_item + Token * _literal; withitem_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = with_item_rule(p)) // with_item ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, 
"asdl_seq_new _loop0_43"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_43_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_43_type, _seq); + return _seq; } // _gather_42: with_item _loop0_43 @@ -12736,8 +12736,8 @@ _gather_42_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // with_item _loop0_43 withitem_ty elem; asdl_seq * seq; @@ -12747,14 +12747,14 @@ _gather_42_rule(Parser *p) (seq = _loop0_43_rule(p)) // _loop0_43 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_45: ',' with_item @@ -12764,54 +12764,54 @@ _loop0_45_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' with_item + Token * _literal; withitem_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = with_item_rule(p)) // with_item ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - 
PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_45"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_45_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_45_type, _seq); + return _seq; } // _gather_44: with_item _loop0_45 @@ -12821,8 +12821,8 @@ _gather_44_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // with_item _loop0_45 withitem_ty elem; asdl_seq * seq; @@ -12832,14 +12832,14 @@ _gather_44_rule(Parser *p) (seq = _loop0_45_rule(p)) // _loop0_45 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_46: 'as' target @@ -12849,29 +12849,29 @@ _tmp_46_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'as' target - Token * keyword; + Token * 
_keyword; expr_ty t; if ( - (keyword = _PyPegen_expect_token(p, 531)) // token='as' + (_keyword = _PyPegen_expect_token(p, 531)) // token='as' && (t = target_rule(p)) // target ) { - res = t; - if (res == NULL && PyErr_Occurred()) { + _res = t; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop1_47: except_block @@ -12881,50 +12881,50 @@ _loop1_47_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // except_block excepthandler_ty except_block_var; while ( (except_block_var = except_block_rule(p)) // except_block ) { - res = except_block_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = except_block_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, 
"asdl_seq_new _loop1_47"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_47_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_47_type, _seq); + return _seq; } // _tmp_48: 'as' target @@ -12934,29 +12934,29 @@ _tmp_48_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'as' target - Token * keyword; + Token * _keyword; expr_ty z; if ( - (keyword = _PyPegen_expect_token(p, 531)) // token='as' + (_keyword = _PyPegen_expect_token(p, 531)) // token='as' && (z = target_rule(p)) // target ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_49: 'from' expression @@ -12966,29 +12966,29 @@ _tmp_49_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'from' expression - Token * keyword; + Token * _keyword; expr_ty z; if ( - (keyword = _PyPegen_expect_token(p, 514)) // token='from' + (_keyword = _PyPegen_expect_token(p, 514)) // token='from' && (z = expression_rule(p)) // expression ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_50: '->' expression @@ -12998,29 +12998,29 @@ _tmp_50_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + 
void * _res = NULL; + int _mark = p->mark; { // '->' expression - Token * literal; + Token * _literal; expr_ty z; if ( - (literal = _PyPegen_expect_token(p, 51)) // token='->' + (_literal = _PyPegen_expect_token(p, 51)) // token='->' && (z = expression_rule(p)) // expression ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_51: '->' expression @@ -13030,29 +13030,29 @@ _tmp_51_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '->' expression - Token * literal; + Token * _literal; expr_ty z; if ( - (literal = _PyPegen_expect_token(p, 51)) // token='->' + (_literal = _PyPegen_expect_token(p, 51)) // token='->' && (z = expression_rule(p)) // expression ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_52: NEWLINE INDENT @@ -13062,8 +13062,8 @@ _tmp_52_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // NEWLINE INDENT Token * indent_var; Token * newline_var; @@ -13073,14 +13073,14 @@ _tmp_52_rule(Parser *p) (indent_var = _PyPegen_expect_token(p, INDENT)) // token='INDENT' ) { - res = _PyPegen_dummy_name(p, newline_var, indent_var); + _res = _PyPegen_dummy_name(p, newline_var, indent_var); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_53: param_no_default @@ -13090,46 +13090,46 @@ _loop0_53_rule(Parser *p) if (p->error_indicator) { return NULL; } - void 
*res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_no_default arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - res = param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_53"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_53_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_53_type, _seq); + return _seq; } // _loop0_54: param_with_default @@ -13139,46 +13139,46 @@ _loop0_54_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + 
void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_with_default NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { - res = param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_54"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_54_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_54_type, _seq); + return _seq; } // _loop0_55: param_with_default @@ -13188,46 +13188,46 @@ _loop0_55_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = 
PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_with_default NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { - res = param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_55"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_55_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_55_type, _seq); + return _seq; } // _loop1_56: param_no_default @@ -13237,50 +13237,50 @@ _loop1_56_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return 
NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_no_default arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - res = param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_56"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_56_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_56_type, _seq); + return _seq; } // _loop0_57: param_with_default @@ -13290,46 +13290,46 @@ _loop0_57_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); 
return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_with_default NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { - res = param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_57"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_57_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_57_type, _seq); + return _seq; } // _loop1_58: param_with_default @@ -13339,50 +13339,50 @@ _loop1_58_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 
0; { // param_with_default NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { - res = param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_58"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_58_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_58_type, _seq); + return _seq; } // _loop1_59: param_no_default @@ -13392,50 +13392,50 @@ _loop1_59_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t 
_children_capacity = 1; + ssize_t _n = 0; { // param_no_default arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - res = param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_59"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_59_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_59_type, _seq); + return _seq; } // _loop1_60: param_no_default @@ -13445,50 +13445,50 @@ _loop1_60_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + 
ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_no_default arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - res = param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_60"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_60_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_60_type, _seq); + return _seq; } // _loop0_61: param_no_default @@ -13498,46 +13498,46 @@ _loop0_61_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; 
+ ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_no_default arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - res = param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_61"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_61_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_61_type, _seq); + return _seq; } // _loop1_62: param_with_default @@ -13547,50 +13547,50 @@ _loop1_62_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_with_default NameDefaultPair* param_with_default_var; while ( 
(param_with_default_var = param_with_default_rule(p)) // param_with_default ) { - res = param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_62"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_62_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_62_type, _seq); + return _seq; } // _loop0_63: param_no_default @@ -13600,46 +13600,46 @@ _loop0_63_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_no_default arg_ty 
param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - res = param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_63"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_63_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_63_type, _seq); + return _seq; } // _loop1_64: param_with_default @@ -13649,50 +13649,50 @@ _loop1_64_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_with_default NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { - res = 
param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_64"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_64_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_64_type, _seq); + return _seq; } // _loop0_65: param_maybe_default @@ -13702,46 +13702,46 @@ _loop0_65_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_maybe_default NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) 
// param_maybe_default ) { - res = param_maybe_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_maybe_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_65"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_65_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_65_type, _seq); + return _seq; } // _loop1_66: param_maybe_default @@ -13751,50 +13751,50 @@ _loop1_66_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_maybe_default NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default ) { - res = param_maybe_default_var; - if (n == children_capacity) { - 
children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_maybe_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_66"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_66_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_66_type, _seq); + return _seq; } // _loop1_67: ('@' named_expression NEWLINE) @@ -13804,50 +13804,50 @@ _loop1_67_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ('@' named_expression NEWLINE) void *_tmp_137_var; while ( (_tmp_137_var = _tmp_137_rule(p)) // '@' named_expression NEWLINE ) { - res = _tmp_137_var; - if (n == 
children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = _tmp_137_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_67"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_67_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_67_type, _seq); + return _seq; } // _tmp_68: '(' arguments? ')' @@ -13857,32 +13857,32 @@ _tmp_68_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '(' arguments? ')' - Token * literal; - Token * literal_1; + Token * _literal; + Token * _literal_1; void *z; if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (z = arguments_rule(p), 1) // arguments? 
&& - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_70: ',' star_expression @@ -13892,54 +13892,54 @@ _loop0_70_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' star_expression + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = star_expression_rule(p)) // star_expression ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq 
*_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_70"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_70_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_70_type, _seq); + return _seq; } // _gather_69: star_expression _loop0_70 @@ -13949,8 +13949,8 @@ _gather_69_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // star_expression _loop0_70 expr_ty elem; asdl_seq * seq; @@ -13960,14 +13960,14 @@ _gather_69_rule(Parser *p) (seq = _loop0_70_rule(p)) // _loop0_70 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop1_71: (',' star_expression) @@ -13977,50 +13977,50 @@ _loop1_71_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // (',' star_expression) void *_tmp_138_var; while ( (_tmp_138_var = _tmp_138_rule(p)) // ',' star_expression ) { - res = _tmp_138_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + 
_res = _tmp_138_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_71"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_71_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_71_type, _seq); + return _seq; } // _loop0_73: ',' star_named_expression @@ -14030,54 +14030,54 @@ _loop0_73_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' star_named_expression + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = star_named_expression_rule(p)) // star_named_expression ) { - res = elem; - if (res == NULL && 
PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_73"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_73_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_73_type, _seq); + return _seq; } // _gather_72: star_named_expression _loop0_73 @@ -14087,8 +14087,8 @@ _gather_72_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // star_named_expression _loop0_73 expr_ty elem; asdl_seq * seq; @@ -14098,14 +14098,14 @@ _gather_72_rule(Parser *p) (seq = _loop0_73_rule(p)) // _loop0_73 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop1_74: (',' expression) @@ -14115,50 +14115,50 @@ _loop1_74_rule(Parser *p) if (p->error_indicator) { return NULL; } - 
void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // (',' expression) void *_tmp_139_var; while ( (_tmp_139_var = _tmp_139_rule(p)) // ',' expression ) { - res = _tmp_139_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = _tmp_139_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_74"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_74_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_74_type, _seq); + return _seq; } // _loop0_75: lambda_param_no_default @@ -14168,46 +14168,46 @@ _loop0_75_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = 
p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_no_default arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - res = lambda_param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_75"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_75_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_75_type, _seq); + return _seq; } // _loop0_76: lambda_param_with_default @@ -14217,46 +14217,46 @@ _loop0_76_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void 
*)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_with_default NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { - res = lambda_param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_76"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_76_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_76_type, _seq); + return _seq; } // _loop0_77: lambda_param_with_default @@ -14266,46 +14266,46 @@ _loop0_77_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark 
= p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_with_default NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { - res = lambda_param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_77"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_77_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_77_type, _seq); + return _seq; } // _loop1_78: lambda_param_no_default @@ -14315,50 +14315,50 @@ _loop1_78_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children 
= PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_no_default arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - res = lambda_param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_78"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_78_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_78_type, _seq); + return _seq; } // _loop0_79: lambda_param_with_default @@ -14368,46 +14368,46 @@ _loop0_79_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark 
= p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_with_default NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { - res = lambda_param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_79"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_79_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_79_type, _seq); + return _seq; } // _loop1_80: lambda_param_with_default @@ -14417,50 +14417,50 @@ _loop1_80_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void 
**_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_with_default NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { - res = lambda_param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_80"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_80_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_80_type, _seq); + return _seq; } // _loop1_81: lambda_param_no_default @@ -14470,50 +14470,50 @@ _loop1_81_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) 
{ + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_no_default arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - res = lambda_param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_81"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_81_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_81_type, _seq); + return _seq; } // _loop1_82: lambda_param_no_default @@ -14523,50 +14523,50 @@ _loop1_82_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void 
**children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_no_default arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - res = lambda_param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_82"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_82_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_82_type, _seq); + return _seq; } // _loop0_83: lambda_param_no_default @@ -14576,46 +14576,46 @@ _loop0_83_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; 
- int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_no_default arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - res = lambda_param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_83"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_83_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_83_type, _seq); + return _seq; } // _loop1_84: lambda_param_with_default @@ -14625,50 +14625,50 @@ _loop1_84_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = 
PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_with_default NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { - res = lambda_param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_84"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_84_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_84_type, _seq); + return _seq; } // _loop0_85: lambda_param_no_default @@ -14678,46 +14678,46 @@ _loop0_85_rule(Parser *p) if (p->error_indicator) { return NULL; } - void 
*res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_no_default arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - res = lambda_param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_85"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_85_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_85_type, _seq); + return _seq; } // _loop1_86: lambda_param_with_default @@ -14727,50 +14727,50 @@ _loop1_86_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children 
= PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_with_default NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { - res = lambda_param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_86"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_86_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_86_type, _seq); + return _seq; } // _loop0_87: lambda_param_maybe_default @@ -14780,46 +14780,46 @@ _loop0_87_rule(Parser *p) if (p->error_indicator) { return NULL; } - 
void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_maybe_default NameDefaultPair* lambda_param_maybe_default_var; while ( (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default ) { - res = lambda_param_maybe_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_maybe_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_87"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_87_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_87_type, _seq); + return _seq; } // _loop1_88: lambda_param_maybe_default @@ -14829,50 +14829,50 @@ _loop1_88_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int 
start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_maybe_default NameDefaultPair* lambda_param_maybe_default_var; while ( (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default ) { - res = lambda_param_maybe_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_maybe_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_88"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_88_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_88_type, _seq); + return _seq; } // _loop1_89: ('or' conjunction) @@ -14882,50 +14882,50 @@ _loop1_89_rule(Parser *p) if 
(p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ('or' conjunction) void *_tmp_140_var; while ( (_tmp_140_var = _tmp_140_rule(p)) // 'or' conjunction ) { - res = _tmp_140_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = _tmp_140_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_89"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_89_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_89_type, _seq); + return _seq; } // _loop1_90: ('and' inversion) @@ -14935,50 +14935,50 @@ _loop1_90_rule(Parser *p) if (p->error_indicator) { return NULL; 
} - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ('and' inversion) void *_tmp_141_var; while ( (_tmp_141_var = _tmp_141_rule(p)) // 'and' inversion ) { - res = _tmp_141_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = _tmp_141_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_90"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_90_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_90_type, _seq); + return _seq; } // _loop1_91: compare_op_bitwise_or_pair @@ -14988,50 +14988,50 @@ _loop1_91_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int 
mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // compare_op_bitwise_or_pair CmpopExprPair* compare_op_bitwise_or_pair_var; while ( (compare_op_bitwise_or_pair_var = compare_op_bitwise_or_pair_rule(p)) // compare_op_bitwise_or_pair ) { - res = compare_op_bitwise_or_pair_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = compare_op_bitwise_or_pair_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_91"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_91_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_91_type, _seq); + return _seq; } // _tmp_92: '!=' @@ -15041,26 +15041,26 @@ _tmp_92_rule(Parser *p) if 
(p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '!=' Token * tok; if ( (tok = _PyPegen_expect_token(p, 28)) // token='!=' ) { - res = _PyPegen_check_barry_as_flufl ( p ) ? NULL : tok; - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_check_barry_as_flufl ( p ) ? NULL : tok; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_94: ',' slice @@ -15070,54 +15070,54 @@ _loop0_94_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' slice + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = slice_rule(p)) // slice ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = 
_res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_94"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_94_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_94_type, _seq); + return _seq; } // _gather_93: slice _loop0_94 @@ -15127,8 +15127,8 @@ _gather_93_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // slice _loop0_94 expr_ty elem; asdl_seq * seq; @@ -15138,14 +15138,14 @@ _gather_93_rule(Parser *p) (seq = _loop0_94_rule(p)) // _loop0_94 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_95: ':' expression? @@ -15155,29 +15155,29 @@ _tmp_95_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ':' expression? + Token * _literal; void *d; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (d = expression_rule(p), 1) // expression? 
) { - res = d; - if (res == NULL && PyErr_Occurred()) { + _res = d; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_96: tuple | group | genexp @@ -15187,18 +15187,18 @@ _tmp_96_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // tuple expr_ty tuple_var; if ( (tuple_var = tuple_rule(p)) // tuple ) { - res = tuple_var; + _res = tuple_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // group expr_ty group_var; @@ -15206,10 +15206,10 @@ _tmp_96_rule(Parser *p) (group_var = group_rule(p)) // group ) { - res = group_var; + _res = group_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // genexp expr_ty genexp_var; @@ -15217,14 +15217,14 @@ _tmp_96_rule(Parser *p) (genexp_var = genexp_rule(p)) // genexp ) { - res = genexp_var; + _res = genexp_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_97: list | listcomp @@ -15234,18 +15234,18 @@ _tmp_97_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // list expr_ty list_var; if ( (list_var = list_rule(p)) // list ) { - res = list_var; + _res = list_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // listcomp expr_ty listcomp_var; @@ -15253,14 +15253,14 @@ _tmp_97_rule(Parser *p) (listcomp_var = listcomp_rule(p)) // listcomp ) { - res = listcomp_var; + _res = listcomp_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_98: dict | set | dictcomp | setcomp @@ -15270,18 +15270,18 @@ _tmp_98_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * 
_res = NULL; + int _mark = p->mark; { // dict expr_ty dict_var; if ( (dict_var = dict_rule(p)) // dict ) { - res = dict_var; + _res = dict_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // set expr_ty set_var; @@ -15289,10 +15289,10 @@ _tmp_98_rule(Parser *p) (set_var = set_rule(p)) // set ) { - res = set_var; + _res = set_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // dictcomp expr_ty dictcomp_var; @@ -15300,10 +15300,10 @@ _tmp_98_rule(Parser *p) (dictcomp_var = dictcomp_rule(p)) // dictcomp ) { - res = dictcomp_var; + _res = dictcomp_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // setcomp expr_ty setcomp_var; @@ -15311,14 +15311,14 @@ _tmp_98_rule(Parser *p) (setcomp_var = setcomp_rule(p)) // setcomp ) { - res = setcomp_var; + _res = setcomp_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop1_99: STRING @@ -15328,50 +15328,50 @@ _loop1_99_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // STRING expr_ty string_var; while ( (string_var = _PyPegen_string_token(p)) // STRING ) { - res = string_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = string_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - 
children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_99"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_99_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_99_type, _seq); + return _seq; } // _tmp_100: star_named_expression ',' star_named_expressions? @@ -15381,32 +15381,32 @@ _tmp_100_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // star_named_expression ',' star_named_expressions? - Token * literal; + Token * _literal; expr_ty y; void *z; if ( (y = star_named_expression_rule(p)) // star_named_expression && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (z = star_named_expressions_rule(p), 1) // star_named_expressions? 
) { - res = _PyPegen_seq_insert_in_front ( p , y , z ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_seq_insert_in_front ( p , y , z ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_101: yield_expr | named_expression @@ -15416,18 +15416,18 @@ _tmp_101_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // yield_expr expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - res = yield_expr_var; + _res = yield_expr_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // named_expression expr_ty named_expression_var; @@ -15435,14 +15435,14 @@ _tmp_101_rule(Parser *p) (named_expression_var = named_expression_rule(p)) // named_expression ) { - res = named_expression_var; + _res = named_expression_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_103: ',' kvpair @@ -15452,54 +15452,54 @@ _loop0_103_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' kvpair + Token * _literal; KeyValuePair* elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = kvpair_rule(p)) // kvpair ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { 
+ _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_103"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_103_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_103_type, _seq); + return _seq; } // _gather_102: kvpair _loop0_103 @@ -15509,8 +15509,8 @@ _gather_102_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // kvpair _loop0_103 KeyValuePair* elem; asdl_seq * seq; @@ -15520,14 +15520,14 @@ _gather_102_rule(Parser *p) (seq = _loop0_103_rule(p)) // _loop0_103 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop1_104: for_if_clause @@ -15537,50 +15537,50 @@ _loop1_104_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = 
p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // for_if_clause comprehension_ty for_if_clause_var; while ( (for_if_clause_var = for_if_clause_rule(p)) // for_if_clause ) { - res = for_if_clause_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = for_if_clause_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_104"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_104_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_104_type, _seq); + return _seq; } // _loop0_105: ('if' disjunction) @@ -15590,46 +15590,46 @@ _loop0_105_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = 
p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ('if' disjunction) void *_tmp_142_var; while ( (_tmp_142_var = _tmp_142_rule(p)) // 'if' disjunction ) { - res = _tmp_142_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = _tmp_142_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_105"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_105_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_105_type, _seq); + return _seq; } // _loop0_106: ('if' disjunction) @@ -15639,46 +15639,46 @@ _loop0_106_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = 
p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ('if' disjunction) void *_tmp_143_var; while ( (_tmp_143_var = _tmp_143_rule(p)) // 'if' disjunction ) { - res = _tmp_143_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = _tmp_143_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_106"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_106_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_106_type, _seq); + return _seq; } // _tmp_107: ',' args @@ -15688,29 +15688,29 @@ _tmp_107_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ',' args + Token * _literal; expr_ty c; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (c = args_rule(p)) // args ) { - res = c; - if (res == NULL && PyErr_Occurred()) { + _res = c; + if (_res == NULL 
&& PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_108: ',' args @@ -15720,29 +15720,29 @@ _tmp_108_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ',' args + Token * _literal; expr_ty c; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (c = args_rule(p)) // args ) { - res = c; - if (res == NULL && PyErr_Occurred()) { + _res = c; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_110: ',' kwarg_or_starred @@ -15752,54 +15752,54 @@ _loop0_110_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' kwarg_or_starred + Token * _literal; KeywordOrStarred* elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = 
PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_110"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_110_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_110_type, _seq); + return _seq; } // _gather_109: kwarg_or_starred _loop0_110 @@ -15809,8 +15809,8 @@ _gather_109_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // kwarg_or_starred _loop0_110 KeywordOrStarred* elem; asdl_seq * seq; @@ -15820,14 +15820,14 @@ _gather_109_rule(Parser *p) (seq = _loop0_110_rule(p)) // _loop0_110 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_112: ',' kwarg_or_double_starred @@ -15837,54 +15837,54 @@ _loop0_112_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + 
void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' kwarg_or_double_starred + Token * _literal; KeywordOrStarred* elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_112"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_112_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_112_type, _seq); + return _seq; } // _gather_111: kwarg_or_double_starred _loop0_112 @@ -15894,8 +15894,8 @@ _gather_111_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int 
mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // kwarg_or_double_starred _loop0_112 KeywordOrStarred* elem; asdl_seq * seq; @@ -15905,14 +15905,14 @@ _gather_111_rule(Parser *p) (seq = _loop0_112_rule(p)) // _loop0_112 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_114: ',' kwarg_or_starred @@ -15922,54 +15922,54 @@ _loop0_114_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' kwarg_or_starred + Token * _literal; KeywordOrStarred* elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = 
mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_114"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_114_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_114_type, _seq); + return _seq; } // _gather_113: kwarg_or_starred _loop0_114 @@ -15979,8 +15979,8 @@ _gather_113_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // kwarg_or_starred _loop0_114 KeywordOrStarred* elem; asdl_seq * seq; @@ -15990,14 +15990,14 @@ _gather_113_rule(Parser *p) (seq = _loop0_114_rule(p)) // _loop0_114 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_116: ',' kwarg_or_double_starred @@ -16007,54 +16007,54 @@ _loop0_116_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' kwarg_or_double_starred + Token * _literal; KeywordOrStarred* elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' 
+ (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_116"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_116_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_116_type, _seq); + return _seq; } // _gather_115: kwarg_or_double_starred _loop0_116 @@ -16064,8 +16064,8 @@ _gather_115_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // kwarg_or_double_starred _loop0_116 KeywordOrStarred* elem; asdl_seq * seq; @@ -16075,14 +16075,14 @@ _gather_115_rule(Parser *p) (seq = _loop0_116_rule(p)) // _loop0_116 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - 
res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_117: (',' star_target) @@ -16092,46 +16092,46 @@ _loop0_117_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // (',' star_target) void *_tmp_144_var; while ( (_tmp_144_var = _tmp_144_rule(p)) // ',' star_target ) { - res = _tmp_144_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = _tmp_144_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_117"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_117_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_117_type, _seq); + return _seq; } // _loop0_119: ',' star_target @@ -16141,54 +16141,54 @@ _loop0_119_rule(Parser *p) if (p->error_indicator) { 
return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' star_target + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = star_target_rule(p)) // star_target ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_119"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_119_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_119_type, _seq); + return 
_seq; } // _gather_118: star_target _loop0_119 @@ -16198,8 +16198,8 @@ _gather_118_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // star_target _loop0_119 expr_ty elem; asdl_seq * seq; @@ -16209,14 +16209,14 @@ _gather_118_rule(Parser *p) (seq = _loop0_119_rule(p)) // _loop0_119 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_120: !'*' star_target @@ -16226,8 +16226,8 @@ _tmp_120_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // !'*' star_target expr_ty star_target_var; if ( @@ -16236,14 +16236,14 @@ _tmp_120_rule(Parser *p) (star_target_var = star_target_rule(p)) // star_target ) { - res = star_target_var; + _res = star_target_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_122: ',' del_target @@ -16253,54 +16253,54 @@ _loop0_122_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' del_target + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = del_target_rule(p)) // del_target ) { - res = elem; - if 
(res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_122"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_122_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_122_type, _seq); + return _seq; } // _gather_121: del_target _loop0_122 @@ -16310,8 +16310,8 @@ _gather_121_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // del_target _loop0_122 expr_ty elem; asdl_seq * seq; @@ -16321,14 +16321,14 @@ _gather_121_rule(Parser *p) (seq = _loop0_122_rule(p)) // _loop0_122 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_124: ',' target @@ -16338,54 +16338,54 @@ _loop0_124_rule(Parser *p) if (p->error_indicator) { return NULL; } - 
void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' target + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = target_rule(p)) // target ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_124"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_124_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_124_type, _seq); + return _seq; } // _gather_123: target 
_loop0_124 @@ -16395,8 +16395,8 @@ _gather_123_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // target _loop0_124 expr_ty elem; asdl_seq * seq; @@ -16406,14 +16406,14 @@ _gather_123_rule(Parser *p) (seq = _loop0_124_rule(p)) // _loop0_124 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_125: args | expression for_if_clauses @@ -16423,18 +16423,18 @@ _tmp_125_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // args expr_ty args_var; if ( (args_var = args_rule(p)) // args ) { - res = args_var; + _res = args_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // expression for_if_clauses expr_ty expression_var; @@ -16445,14 +16445,14 @@ _tmp_125_rule(Parser *p) (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses ) { - res = _PyPegen_dummy_name(p, expression_var, for_if_clauses_var); + _res = _PyPegen_dummy_name(p, expression_var, for_if_clauses_var); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_126: '=' annotated_rhs @@ -16462,25 +16462,25 @@ _tmp_126_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '=' annotated_rhs + Token * _literal; expr_ty annotated_rhs_var; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 22)) // token='=' + (_literal = _PyPegen_expect_token(p, 22)) // token='=' && (annotated_rhs_var = annotated_rhs_rule(p)) // annotated_rhs ) { - res = _PyPegen_dummy_name(p, literal, annotated_rhs_var); + _res = _PyPegen_dummy_name(p, _literal, annotated_rhs_var); 
goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_127: '=' | augassign @@ -16490,18 +16490,18 @@ _tmp_127_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 22)) // token='=' + (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // augassign AugOperator* augassign_var; @@ -16509,14 +16509,14 @@ _tmp_127_rule(Parser *p) (augassign_var = augassign_rule(p)) // augassign ) { - res = augassign_var; + _res = augassign_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_128: yield_expr | star_expressions @@ -16526,18 +16526,18 @@ _tmp_128_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // yield_expr expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - res = yield_expr_var; + _res = yield_expr_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // star_expressions expr_ty star_expressions_var; @@ -16545,14 +16545,14 @@ _tmp_128_rule(Parser *p) (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - res = star_expressions_var; + _res = star_expressions_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_129: '[' | '(' | '{' @@ -16562,44 +16562,44 @@ _tmp_129_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '[' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 9)) // token='[' + 
(_literal = _PyPegen_expect_token(p, 9)) // token='[' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // '(' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // '{' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 25)) // token='{' + (_literal = _PyPegen_expect_token(p, 25)) // token='{' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_130: param_no_default @@ -16609,46 +16609,46 @@ _loop0_130_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_no_default arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - res = param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; 
} - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_130"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_130_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_130_type, _seq); + return _seq; } // _tmp_131: slash_with_default | param_with_default+ @@ -16658,18 +16658,18 @@ _tmp_131_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // slash_with_default SlashWithDefault* slash_with_default_var; if ( (slash_with_default_var = slash_with_default_rule(p)) // slash_with_default ) { - res = slash_with_default_var; + _res = slash_with_default_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // param_with_default+ asdl_seq * _loop1_145_var; @@ -16677,14 +16677,14 @@ _tmp_131_rule(Parser *p) (_loop1_145_var = _loop1_145_rule(p)) // param_with_default+ ) { - res = _loop1_145_var; + _res = _loop1_145_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_132: ')' | ',' (')' | '**') @@ -16694,36 +16694,36 @@ _tmp_132_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ')' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 8)) // token=')' + (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // ',' (')' | '**') + Token * _literal; void *_tmp_146_var; - Token * literal; if ( - (literal = 
_PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (_tmp_146_var = _tmp_146_rule(p)) // ')' | '**' ) { - res = _PyPegen_dummy_name(p, literal, _tmp_146_var); + _res = _PyPegen_dummy_name(p, _literal, _tmp_146_var); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_133: ':' | ',' (':' | '**') @@ -16733,36 +16733,36 @@ _tmp_133_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ':' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // ',' (':' | '**') + Token * _literal; void *_tmp_147_var; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (_tmp_147_var = _tmp_147_rule(p)) // ':' | '**' ) { - res = _PyPegen_dummy_name(p, literal, _tmp_147_var); + _res = _PyPegen_dummy_name(p, _literal, _tmp_147_var); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_134: star_targets '=' @@ -16772,29 +16772,29 @@ _tmp_134_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // star_targets '=' - Token * literal; + Token * _literal; expr_ty z; if ( (z = star_targets_rule(p)) // star_targets && - (literal = _PyPegen_expect_token(p, 22)) // token='=' + (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = 
NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_135: '.' | '...' @@ -16804,33 +16804,33 @@ _tmp_135_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '.' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 23)) // token='.' + (_literal = _PyPegen_expect_token(p, 23)) // token='.' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // '...' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 52)) // token='...' + (_literal = _PyPegen_expect_token(p, 52)) // token='...' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_136: '.' | '...' @@ -16840,33 +16840,33 @@ _tmp_136_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '.' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 23)) // token='.' + (_literal = _PyPegen_expect_token(p, 23)) // token='.' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // '...' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 52)) // token='...' + (_literal = _PyPegen_expect_token(p, 52)) // token='...' 
) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_137: '@' named_expression NEWLINE @@ -16876,32 +16876,32 @@ _tmp_137_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '@' named_expression NEWLINE + Token * _literal; expr_ty f; - Token * literal; Token * newline_var; if ( - (literal = _PyPegen_expect_token(p, 49)) // token='@' + (_literal = _PyPegen_expect_token(p, 49)) // token='@' && (f = named_expression_rule(p)) // named_expression && (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - res = f; - if (res == NULL && PyErr_Occurred()) { + _res = f; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_138: ',' star_expression @@ -16911,29 +16911,29 @@ _tmp_138_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ',' star_expression + Token * _literal; expr_ty c; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (c = star_expression_rule(p)) // star_expression ) { - res = c; - if (res == NULL && PyErr_Occurred()) { + _res = c; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_139: ',' expression @@ -16943,29 +16943,29 @@ _tmp_139_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ',' expression + Token * _literal; expr_ty c; - Token * literal; 
if ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (c = expression_rule(p)) // expression ) { - res = c; - if (res == NULL && PyErr_Occurred()) { + _res = c; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_140: 'or' conjunction @@ -16975,29 +16975,29 @@ _tmp_140_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'or' conjunction + Token * _keyword; expr_ty c; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 532)) // token='or' + (_keyword = _PyPegen_expect_token(p, 532)) // token='or' && (c = conjunction_rule(p)) // conjunction ) { - res = c; - if (res == NULL && PyErr_Occurred()) { + _res = c; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_141: 'and' inversion @@ -17007,29 +17007,29 @@ _tmp_141_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'and' inversion + Token * _keyword; expr_ty c; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 533)) // token='and' + (_keyword = _PyPegen_expect_token(p, 533)) // token='and' && (c = inversion_rule(p)) // inversion ) { - res = c; - if (res == NULL && PyErr_Occurred()) { + _res = c; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_142: 'if' disjunction @@ -17039,29 +17039,29 @@ _tmp_142_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - 
int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'if' disjunction - Token * keyword; + Token * _keyword; expr_ty z; if ( - (keyword = _PyPegen_expect_token(p, 510)) // token='if' + (_keyword = _PyPegen_expect_token(p, 510)) // token='if' && (z = disjunction_rule(p)) // disjunction ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_143: 'if' disjunction @@ -17071,29 +17071,29 @@ _tmp_143_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'if' disjunction - Token * keyword; + Token * _keyword; expr_ty z; if ( - (keyword = _PyPegen_expect_token(p, 510)) // token='if' + (_keyword = _PyPegen_expect_token(p, 510)) // token='if' && (z = disjunction_rule(p)) // disjunction ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_144: ',' star_target @@ -17103,29 +17103,29 @@ _tmp_144_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ',' star_target + Token * _literal; expr_ty c; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (c = star_target_rule(p)) // star_target ) { - res = c; - if (res == NULL && PyErr_Occurred()) { + _res = c; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // 
_loop1_145: param_with_default @@ -17135,50 +17135,50 @@ _loop1_145_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_with_default NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { - res = param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_145"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_145_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, 
_loop1_145_type, _seq); + return _seq; } // _tmp_146: ')' | '**' @@ -17188,33 +17188,33 @@ _tmp_146_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ')' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 8)) // token=')' + (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // '**' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 35)) // token='**' + (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_147: ':' | '**' @@ -17224,33 +17224,33 @@ _tmp_147_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ':' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // '**' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 35)) // token='**' + (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } void * diff --git a/Parser/pegen/pegen.h b/Parser/pegen/pegen.h index ffb18e47e4a9a..b55a652ac8060 100644 --- a/Parser/pegen/pegen.h +++ b/Parser/pegen/pegen.h @@ -132,7 +132,7 @@ void *_PyPegen_dummy_name(Parser *p, ...); #define UNUSED(expr) do { (void)(expr); } while (0) #define EXTRA_EXPR(head, tail) head->lineno, head->col_offset, tail->end_lineno, tail->end_col_offset, 
p->arena -#define EXTRA start_lineno, start_col_offset, end_lineno, end_col_offset, p->arena +#define EXTRA _start_lineno, _start_col_offset, _end_lineno, _end_col_offset, p->arena #define RAISE_SYNTAX_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_SyntaxError, 1, msg, ##__VA_ARGS__) #define RAISE_INDENTATION_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_IndentationError, 1, msg, ##__VA_ARGS__) #define RAISE_SYNTAX_ERROR_NO_COL_OFFSET(msg, ...) _PyPegen_raise_error(p, PyExc_SyntaxError, 0, msg, ##__VA_ARGS__) diff --git a/Tools/peg_generator/pegen/c_generator.py b/Tools/peg_generator/pegen/c_generator.py index b7a9942c2fdd2..6c9aa3f2ba786 100644 --- a/Tools/peg_generator/pegen/c_generator.py +++ b/Tools/peg_generator/pegen/c_generator.py @@ -1,5 +1,5 @@ import ast -from dataclasses import dataclass, field +from dataclasses import dataclass import re from typing import Any, Dict, IO, Optional, List, Text, Tuple, Set from enum import Enum @@ -101,7 +101,7 @@ def keyword_helper(self, keyword: str) -> FunctionCall: if keyword not in self.keyword_cache: self.keyword_cache[keyword] = self.gen.keyword_type() return FunctionCall( - assigned_variable="keyword", + assigned_variable="_keyword", function="_PyPegen_expect_token", arguments=["p", self.keyword_cache[keyword]], return_type="Token *", @@ -140,7 +140,7 @@ def visit_NameLeaf(self, node: NameLeaf) -> FunctionCall: function=f"{name}_rule", arguments=["p"], return_type=type, - comment=f"{node}" + comment=f"{node}", ) def visit_StringLeaf(self, node: StringLeaf) -> FunctionCall: @@ -151,7 +151,7 @@ def visit_StringLeaf(self, node: StringLeaf) -> FunctionCall: assert val in self.exact_tokens, f"{node.value} is not a known literal" type = self.exact_tokens[val] return FunctionCall( - assigned_variable="literal", + assigned_variable="_literal", function=f"_PyPegen_expect_token", arguments=["p", type], nodetype=NodeTypes.GENERIC_TOKEN, @@ -175,8 +175,10 @@ def can_we_inline(node: Rhs) -> int: else: name = 
self.gen.name_node(node) self.cache[node] = FunctionCall( - assigned_variable=f"{name}_var", function=f"{name}_rule", arguments=["p"], - comment=f"{node}" + assigned_variable=f"{name}_var", + function=f"{name}_rule", + arguments=["p"], + comment=f"{node}", ) return self.cache[node] @@ -217,11 +219,11 @@ def visit_NegativeLookahead(self, node: NegativeLookahead) -> FunctionCall: def visit_Opt(self, node: Opt) -> FunctionCall: call = self.visit(node.node) return FunctionCall( - assigned_variable="opt_var", + assigned_variable="_opt_var", function=call.function, arguments=call.arguments, force_true=True, - comment=f"{node}" + comment=f"{node}", ) def visit_Repeat0(self, node: Repeat0) -> FunctionCall: @@ -268,7 +270,7 @@ def visit_Group(self, node: Group) -> FunctionCall: def visit_Cut(self, node: Cut) -> FunctionCall: return FunctionCall( - assigned_variable="cut_var", + assigned_variable="_cut_var", return_type="int", function="1", nodetype=NodeTypes.CUT_OPERATOR, @@ -418,46 +420,46 @@ def _set_up_token_start_metadata_extraction(self) -> None: self.print("p->error_indicator = 1;") self.print("return NULL;") self.print("}") - self.print("int start_lineno = p->tokens[mark]->lineno;") - self.print("UNUSED(start_lineno); // Only used by EXTRA macro") - self.print("int start_col_offset = p->tokens[mark]->col_offset;") - self.print("UNUSED(start_col_offset); // Only used by EXTRA macro") + self.print("int _start_lineno = p->tokens[_mark]->lineno;") + self.print("UNUSED(_start_lineno); // Only used by EXTRA macro") + self.print("int _start_col_offset = p->tokens[_mark]->col_offset;") + self.print("UNUSED(_start_col_offset); // Only used by EXTRA macro") def _set_up_token_end_metadata_extraction(self) -> None: - self.print("Token *token = _PyPegen_get_last_nonnwhitespace_token(p);") - self.print("if (token == NULL) {") + self.print("Token *_token = _PyPegen_get_last_nonnwhitespace_token(p);") + self.print("if (_token == NULL) {") with self.indent(): self.print("return 
NULL;") self.print("}") - self.print(f"int end_lineno = token->end_lineno;") - self.print("UNUSED(end_lineno); // Only used by EXTRA macro") - self.print(f"int end_col_offset = token->end_col_offset;") - self.print("UNUSED(end_col_offset); // Only used by EXTRA macro") + self.print("int _end_lineno = _token->end_lineno;") + self.print("UNUSED(_end_lineno); // Only used by EXTRA macro") + self.print("int _end_col_offset = _token->end_col_offset;") + self.print("UNUSED(_end_col_offset); // Only used by EXTRA macro") def _set_up_rule_memoization(self, node: Rule, result_type: str) -> None: self.print("{") with self.indent(): - self.print(f"{result_type} res = NULL;") - self.print(f"if (_PyPegen_is_memoized(p, {node.name}_type, &res))") + self.print(f"{result_type} _res = NULL;") + self.print(f"if (_PyPegen_is_memoized(p, {node.name}_type, &_res))") with self.indent(): - self.print("return res;") - self.print("int mark = p->mark;") - self.print("int resmark = p->mark;") + self.print("return _res;") + self.print("int _mark = p->mark;") + self.print("int _resmark = p->mark;") self.print("while (1) {") with self.indent(): self.call_with_errorcheck_return( - f"_PyPegen_update_memo(p, mark, {node.name}_type, res)", "res" + f"_PyPegen_update_memo(p, _mark, {node.name}_type, _res)", "_res" ) - self.print("p->mark = mark;") - self.print(f"void *raw = {node.name}_raw(p);") - self.print("if (raw == NULL || p->mark <= resmark)") + self.print("p->mark = _mark;") + self.print(f"void *_raw = {node.name}_raw(p);") + self.print("if (_raw == NULL || p->mark <= _resmark)") with self.indent(): self.print("break;") - self.print("resmark = p->mark;") - self.print("res = raw;") + self.print(f"_resmark = p->mark;") + self.print("_res = _raw;") self.print("}") - self.print("p->mark = resmark;") - self.print("return res;") + self.print(f"p->mark = _resmark;") + self.print("return _res;") self.print("}") self.print(f"static {result_type}") self.print(f"{node.name}_raw(Parser *p)") @@ -473,12 
+475,12 @@ def _handle_default_rule_body(self, node: Rule, rhs: Rhs, result_type: str) -> N with self.indent(): self.print("return NULL;") self.print("}") - self.print(f"{result_type} res = NULL;") + self.print(f"{result_type} _res = NULL;") if memoize: - self.print(f"if (_PyPegen_is_memoized(p, {node.name}_type, &res))") + self.print(f"if (_PyPegen_is_memoized(p, {node.name}_type, &_res))") with self.indent(): - self.print("return res;") - self.print("int mark = p->mark;") + self.print("return _res;") + self.print("int _mark = p->mark;") if any(alt.action and "EXTRA" in alt.action for alt in rhs.alts): self._set_up_token_start_metadata_extraction() self.visit( @@ -488,13 +490,13 @@ def _handle_default_rule_body(self, node: Rule, rhs: Rhs, result_type: str) -> N rulename=node.name if memoize else None, ) if self.debug: - self.print(f'fprintf(stderr, "Fail at %d: {node.name}\\n", p->mark);') - self.print("res = NULL;") + self.print('fprintf(stderr, "Fail at %d: {node.name}\\n", p->mark);') + self.print("_res = NULL;") self.print(" done:") with self.indent(): if memoize: - self.print(f"_PyPegen_insert_memo(p, mark, {node.name}_type, res);") - self.print("return res;") + self.print(f"_PyPegen_insert_memo(p, _mark, {node.name}_type, _res);") + self.print("return _res;") def _handle_loop_rule_body(self, node: Rule, rhs: Rhs) -> None: memoize = self._should_memoize(node) @@ -505,17 +507,17 @@ def _handle_loop_rule_body(self, node: Rule, rhs: Rhs) -> None: with self.indent(): self.print("return NULL;") self.print("}") - self.print(f"void *res = NULL;") + self.print("void *_res = NULL;") if memoize: - self.print(f"if (_PyPegen_is_memoized(p, {node.name}_type, &res))") + self.print(f"if (_PyPegen_is_memoized(p, {node.name}_type, &_res))") with self.indent(): - self.print("return res;") - self.print("int mark = p->mark;") - self.print("int start_mark = p->mark;") - self.print("void **children = PyMem_Malloc(sizeof(void *));") - self.out_of_memory_return(f"!children", "NULL") 
- self.print("ssize_t children_capacity = 1;") - self.print("ssize_t n = 0;") + self.print("return _res;") + self.print("int _mark = p->mark;") + self.print("int _start_mark = p->mark;") + self.print("void **_children = PyMem_Malloc(sizeof(void *));") + self.out_of_memory_return(f"!_children", "NULL") + self.print("ssize_t _children_capacity = 1;") + self.print("ssize_t _n = 0;") if any(alt.action and "EXTRA" in alt.action for alt in rhs.alts): self._set_up_token_start_metadata_extraction() self.visit( @@ -525,23 +527,23 @@ def _handle_loop_rule_body(self, node: Rule, rhs: Rhs) -> None: rulename=node.name if memoize else None, ) if is_repeat1: - self.print("if (n == 0 || p->error_indicator) {") + self.print("if (_n == 0 || p->error_indicator) {") with self.indent(): - self.print("PyMem_Free(children);") + self.print("PyMem_Free(_children);") self.print("return NULL;") self.print("}") - self.print("asdl_seq *seq = _Py_asdl_seq_new(n, p->arena);") + self.print("asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena);") self.out_of_memory_return( - f"!seq", + "!_seq", "NULL", message=f"asdl_seq_new {node.name}", - cleanup_code="PyMem_Free(children);", + cleanup_code="PyMem_Free(_children);", ) - self.print("for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]);") - self.print("PyMem_Free(children);") + self.print("for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]);") + self.print("PyMem_Free(_children);") if node.name: - self.print(f"_PyPegen_insert_memo(p, start_mark, {node.name}_type, seq);") - self.print("return seq;") + self.print(f"_PyPegen_insert_memo(p, _start_mark, {node.name}_type, _seq);") + self.print("return _seq;") def visit_Rule(self, node: Rule) -> None: is_loop = node.is_loop() @@ -599,9 +601,9 @@ def join_conditions(self, keyword: str, node: Any) -> None: self.print(")") def emit_action(self, node: Alt, cleanup_code: Optional[str] = None) -> None: - self.print(f"res = {node.action};") + self.print(f"_res = {node.action};") - 
self.print("if (res == NULL && PyErr_Occurred()) {") + self.print("if (_res == NULL && PyErr_Occurred()) {") with self.indent(): self.print("p->error_indicator = 1;") if cleanup_code: @@ -611,7 +613,7 @@ def emit_action(self, node: Alt, cleanup_code: Optional[str] = None) -> None: if self.debug: self.print( - f'fprintf(stderr, "Hit with action [%d-%d]: %s\\n", mark, p->mark, "{node}");' + f'fprintf(stderr, "Hit with action [%d-%d]: %s\\n", _mark, p->mark, "{node}");' ) def emit_default_action(self, is_gather: bool, node: Alt) -> None: @@ -619,7 +621,7 @@ def emit_default_action(self, is_gather: bool, node: Alt) -> None: if is_gather: assert len(self.local_variable_names) == 2 self.print( - f"res = _PyPegen_seq_insert_in_front(p, " + f"_res = _PyPegen_seq_insert_in_front(p, " f"{self.local_variable_names[0]}, {self.local_variable_names[1]});" ) else: @@ -628,17 +630,17 @@ def emit_default_action(self, is_gather: bool, node: Alt) -> None: f'fprintf(stderr, "Hit without action [%d:%d]: %s\\n", mark, p->mark, "{node}");' ) self.print( - f"res = _PyPegen_dummy_name(p, {', '.join(self.local_variable_names)});" + f"_res = _PyPegen_dummy_name(p, {', '.join(self.local_variable_names)});" ) else: if self.debug: self.print( f'fprintf(stderr, "Hit with default action [%d:%d]: %s\\n", mark, p->mark, "{node}");' ) - self.print(f"res = {self.local_variable_names[0]};") + self.print(f"_res = {self.local_variable_names[0]};") def emit_dummy_action(self) -> None: - self.print(f"res = _PyPegen_dummy_name(p);") + self.print("_res = _PyPegen_dummy_name(p);") def handle_alt_normal(self, node: Alt, is_gather: bool) -> None: self.join_conditions(keyword="if", node=node) @@ -671,20 +673,22 @@ def handle_alt_loop(self, node: Alt, is_gather: bool, rulename: Optional[str]) - if self.skip_actions: self.emit_dummy_action() elif node.action: - self.emit_action(node, cleanup_code="PyMem_Free(children);") + self.emit_action(node, cleanup_code="PyMem_Free(_children);") else: 
self.emit_default_action(is_gather, node) # Add the result of rule to the temporary buffer of children. This buffer # will populate later an asdl_seq with all elements to return. - self.print("if (n == children_capacity) {") + self.print("if (_n == _children_capacity) {") with self.indent(): - self.print("children_capacity *= 2;") - self.print("children = PyMem_Realloc(children, children_capacity*sizeof(void *));") - self.out_of_memory_return(f"!children", "NULL", message=f"realloc {rulename}") + self.print("_children_capacity *= 2;") + self.print( + "_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));" + ) + self.out_of_memory_return(f"!_children", "NULL", message=f"realloc {rulename}") self.print("}") - self.print(f"children[n++] = res;") - self.print("mark = p->mark;") + self.print("_children[_n++] = _res;") + self.print("_mark = p->mark;") self.print("}") def visit_Alt( @@ -699,11 +703,11 @@ def visit_Alt( var_type = "void *" else: var_type += " " - if v == "cut_var": + if v == "_cut_var": v += " = 0" # cut_var must be initialized self.print(f"{var_type}{v};") - if v == "opt_var": - self.print("UNUSED(opt_var); // Silence compiler warnings") + if v == "_opt_var": + self.print("UNUSED(_opt_var); // Silence compiler warnings") with self.local_variable_context(): if is_loop: @@ -711,9 +715,9 @@ def visit_Alt( else: self.handle_alt_normal(node, is_gather) - self.print("p->mark = mark;") - if "cut_var" in vars: - self.print("if (cut_var) return NULL;") + self.print("p->mark = _mark;") + if "_cut_var" in vars: + self.print("if (_cut_var) return NULL;") self.print("}") def collect_vars(self, node: Alt) -> Dict[Optional[str], Optional[str]]: diff --git a/Tools/peg_generator/pegen/parser_generator.py b/Tools/peg_generator/pegen/parser_generator.py index 03452510b9669..689022b12da20 100644 --- a/Tools/peg_generator/pegen/parser_generator.py +++ b/Tools/peg_generator/pegen/parser_generator.py @@ -27,6 +27,11 @@ def visit_NameLeaf(self, node: NameLeaf) 
-> None: # TODO: Add line/col info to (leaf) nodes raise GrammarError(f"Dangling reference to rule {node.value!r}") + def visit_NamedItem(self, node: NameLeaf) -> None: + if node.name and node.name.startswith("_"): + raise GrammarError(f"Variable names cannot start with underscore: '{node.name}'") + self.visit(node.item) + class ParserGenerator: @@ -36,6 +41,7 @@ def __init__(self, grammar: Grammar, tokens: Dict[int, str], file: Optional[IO[T self.grammar = grammar self.tokens = tokens self.rules = grammar.rules + self.validate_rule_names() if "trailer" not in grammar.metas and "start" not in self.rules: raise GrammarError("Grammar without a trailer must have a 'start' rule") checker = RuleCheckingVisitor(self.rules, self.tokens) @@ -51,6 +57,11 @@ def __init__(self, grammar: Grammar, tokens: Dict[int, str], file: Optional[IO[T self.all_rules: Dict[str, Rule] = {} # Rules + temporal rules self._local_variable_stack: List[List[str]] = [] + def validate_rule_names(self): + for rule in self.rules: + if rule.startswith("_"): + raise GrammarError(f"Rule names cannot start with underscore: '{rule}'") + @contextlib.contextmanager def local_variable_context(self) -> Iterator[None]: self._local_variable_stack.append([]) From webhook-mailer at python.org Sun May 10 01:15:59 2020 From: webhook-mailer at python.org (Andre Delfino) Date: Sun, 10 May 2020 05:15:59 -0000 Subject: [Python-checkins] Add link to Enum class (GH-19884) Message-ID: https://github.com/python/cpython/commit/85bdec1def789cdb60ab7ffe115e426267b00a60 commit: 85bdec1def789cdb60ab7ffe115e426267b00a60 branch: master author: Andre Delfino committer: GitHub date: 2020-05-10T10:45:54+05:30 summary: Add link to Enum class (GH-19884) files: M Doc/library/types.rst diff --git a/Doc/library/types.rst b/Doc/library/types.rst index 1d081e2c54868..cdddb46783a47 100644 --- a/Doc/library/types.rst +++ b/Doc/library/types.rst @@ -379,7 +379,7 @@ Additional Utility Classes and Functions class's __getattr__ method; this is 
done by raising AttributeError. This allows one to have properties active on an instance, and have virtual - attributes on the class with the same name (see Enum for an example). + attributes on the class with the same name (see :class:`enum.Enum` for an example). .. versionadded:: 3.4 From webhook-mailer at python.org Sun May 10 04:53:20 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Sun, 10 May 2020 08:53:20 -0000 Subject: [Python-checkins] bpo-40397: Remove __args__ and __parameters__ from _SpecialGenericAlias (GH-19984) Message-ID: https://github.com/python/cpython/commit/fcb285609a2e55f2dc63dcfbb32e4e2fddf71546 commit: fcb285609a2e55f2dc63dcfbb32e4e2fddf71546 branch: master author: Serhiy Storchaka committer: GitHub date: 2020-05-10T11:53:16+03:00 summary: bpo-40397: Remove __args__ and __parameters__ from _SpecialGenericAlias (GH-19984) files: A Misc/NEWS.d/next/Library/2020-05-07-21-22-04.bpo-40397.PVWFAn.rst M Lib/typing.py diff --git a/Lib/typing.py b/Lib/typing.py index 681ab6d21e0a3..e31fc99e02245 100644 --- a/Lib/typing.py +++ b/Lib/typing.py @@ -186,14 +186,13 @@ def _collect_type_vars(types): return tuple(tvars) -def _check_generic(cls, parameters): +def _check_generic(cls, parameters, elen): """Check correct count for parameters of a generic cls (internal helper). This gives a nice error message in case of count mismatch. 
""" - if not cls.__parameters__: + if not elen: raise TypeError(f"{cls} is not a generic class") alen = len(parameters) - elen = len(cls.__parameters__) if alen != elen: raise TypeError(f"Too {'many' if alen > elen else 'few'} parameters for {cls};" f" actual {alen}, expected {elen}") @@ -592,17 +591,6 @@ def __reduce__(self): return self.__name__ -# Special typing constructs Union, Optional, Generic, Callable and Tuple -# use three special attributes for internal bookkeeping of generic types: -# * __parameters__ is a tuple of unique free type parameters of a generic -# type, for example, Dict[T, T].__parameters__ == (T,); -# * __origin__ keeps a reference to a type that was subscripted, -# e.g., Union[T, int].__origin__ == Union, or the non-generic version of -# the type. -# * __args__ is a tuple of all arguments used in subscripting, -# e.g., Dict[T, int].__args__ == (T, int). - - def _is_dunder(attr): return attr.startswith('__') and attr.endswith('__') @@ -615,28 +603,11 @@ class _BaseGenericAlias(_Final, _root=True): have 'name' always set. If 'inst' is False, then the alias can't be instantiated, this is used by e.g. typing.List and typing.Dict. """ - def __init__(self, origin, params, *, inst=True, name=None): + def __init__(self, origin, *, inst=True, name=None): self._inst = inst self._name = name - if not isinstance(params, tuple): - params = (params,) self.__origin__ = origin - self.__args__ = tuple(... if a is _TypingEllipsis else - () if a is _TypingEmpty else - a for a in params) - self.__parameters__ = _collect_type_vars(params) self.__slots__ = None # This is not documented. 
- if not name: - self.__module__ = origin.__module__ - - def __eq__(self, other): - if not isinstance(other, _BaseGenericAlias): - return NotImplemented - return (self.__origin__ == other.__origin__ - and self.__args__ == other.__args__) - - def __hash__(self): - return hash((self.__origin__, self.__args__)) def __call__(self, *args, **kwargs): if not self._inst: @@ -669,7 +640,7 @@ def __getattr__(self, attr): raise AttributeError(attr) def __setattr__(self, attr, val): - if _is_dunder(attr) or attr in ('_name', '_inst'): + if _is_dunder(attr) or attr in ('_name', '_inst', '_nparams'): super().__setattr__(attr, val) else: setattr(self.__origin__, attr, val) @@ -682,7 +653,38 @@ def __subclasscheck__(self, cls): " class and instance checks") +# Special typing constructs Union, Optional, Generic, Callable and Tuple +# use three special attributes for internal bookkeeping of generic types: +# * __parameters__ is a tuple of unique free type parameters of a generic +# type, for example, Dict[T, T].__parameters__ == (T,); +# * __origin__ keeps a reference to a type that was subscripted, +# e.g., Union[T, int].__origin__ == Union, or the non-generic version of +# the type. +# * __args__ is a tuple of all arguments used in subscripting, +# e.g., Dict[T, int].__args__ == (T, int). + + class _GenericAlias(_BaseGenericAlias, _root=True): + def __init__(self, origin, params, *, inst=True, name=None): + super().__init__(origin, inst=inst, name=name) + if not isinstance(params, tuple): + params = (params,) + self.__args__ = tuple(... 
if a is _TypingEllipsis else + () if a is _TypingEmpty else + a for a in params) + self.__parameters__ = _collect_type_vars(params) + if not name: + self.__module__ = origin.__module__ + + def __eq__(self, other): + if not isinstance(other, _GenericAlias): + return NotImplemented + return (self.__origin__ == other.__origin__ + and self.__args__ == other.__args__) + + def __hash__(self): + return hash((self.__origin__, self.__args__)) + @_tp_cache def __getitem__(self, params): if self.__origin__ in (Generic, Protocol): @@ -692,14 +694,14 @@ def __getitem__(self, params): params = (params,) msg = "Parameters to generic types must be types." params = tuple(_type_check(p, msg) for p in params) - _check_generic(self, params) + _check_generic(self, params, len(self.__parameters__)) subst = dict(zip(self.__parameters__, params)) new_args = [] for arg in self.__args__: if isinstance(arg, TypeVar): arg = subst[arg] - elif isinstance(arg, (_BaseGenericAlias, GenericAlias)): + elif isinstance(arg, (_GenericAlias, GenericAlias)): subargs = tuple(subst[x] for x in arg.__parameters__) arg = arg[subargs] new_args.append(arg) @@ -739,11 +741,16 @@ def __mro_entries__(self, bases): return (self.__origin__,) +# _nparams is the number of accepted parameters, e.g. 0 for Hashable, +# 1 for List and 2 for Dict. It may be -1 if variable number of +# parameters are accepted (needs custom __getitem__). + class _SpecialGenericAlias(_BaseGenericAlias, _root=True): - def __init__(self, origin, params, *, inst=True, name=None): + def __init__(self, origin, nparams, *, inst=True, name=None): if name is None: name = origin.__name__ - super().__init__(origin, params, inst=inst, name=name) + super().__init__(origin, inst=inst, name=name) + self._nparams = nparams self.__doc__ = f'A generic version of {origin.__module__}.{origin.__qualname__}' @_tp_cache @@ -752,8 +759,7 @@ def __getitem__(self, params): params = (params,) msg = "Parameters to generic types must be types." 
params = tuple(_type_check(p, msg) for p in params) - _check_generic(self, params) - assert self.__args__ == self.__parameters__ + _check_generic(self, params, self._nparams) return self.copy_with(params) def copy_with(self, params): @@ -912,7 +918,7 @@ def __class_getitem__(cls, params): f"Parameters to {cls.__name__}[...] must all be unique") else: # Subscripting a regular Generic subclass. - _check_generic(cls, params) + _check_generic(cls, params, len(cls.__parameters__)) return _GenericAlias(cls, params) def __init_subclass__(cls, *args, **kwargs): @@ -1571,18 +1577,18 @@ class Other(Leaf): # Error reported by type checker # Various ABCs mimicking those in collections.abc. _alias = _SpecialGenericAlias -Hashable = _alias(collections.abc.Hashable, ()) # Not generic. -Awaitable = _alias(collections.abc.Awaitable, T_co) -Coroutine = _alias(collections.abc.Coroutine, (T_co, T_contra, V_co)) -AsyncIterable = _alias(collections.abc.AsyncIterable, T_co) -AsyncIterator = _alias(collections.abc.AsyncIterator, T_co) -Iterable = _alias(collections.abc.Iterable, T_co) -Iterator = _alias(collections.abc.Iterator, T_co) -Reversible = _alias(collections.abc.Reversible, T_co) -Sized = _alias(collections.abc.Sized, ()) # Not generic. -Container = _alias(collections.abc.Container, T_co) -Collection = _alias(collections.abc.Collection, T_co) -Callable = _CallableType(collections.abc.Callable, ()) +Hashable = _alias(collections.abc.Hashable, 0) # Not generic. +Awaitable = _alias(collections.abc.Awaitable, 1) +Coroutine = _alias(collections.abc.Coroutine, 3) +AsyncIterable = _alias(collections.abc.AsyncIterable, 1) +AsyncIterator = _alias(collections.abc.AsyncIterator, 1) +Iterable = _alias(collections.abc.Iterable, 1) +Iterator = _alias(collections.abc.Iterator, 1) +Reversible = _alias(collections.abc.Reversible, 1) +Sized = _alias(collections.abc.Sized, 0) # Not generic. 
+Container = _alias(collections.abc.Container, 1) +Collection = _alias(collections.abc.Collection, 1) +Callable = _CallableType(collections.abc.Callable, 2) Callable.__doc__ = \ """Callable type; Callable[[int], str] is a function of (int) -> str. @@ -1593,15 +1599,16 @@ class Other(Leaf): # Error reported by type checker There is no syntax to indicate optional or keyword arguments, such function types are rarely used as callback types. """ -AbstractSet = _alias(collections.abc.Set, T_co, name='AbstractSet') -MutableSet = _alias(collections.abc.MutableSet, T) +AbstractSet = _alias(collections.abc.Set, 1, name='AbstractSet') +MutableSet = _alias(collections.abc.MutableSet, 1) # NOTE: Mapping is only covariant in the value type. -Mapping = _alias(collections.abc.Mapping, (KT, VT_co)) -MutableMapping = _alias(collections.abc.MutableMapping, (KT, VT)) -Sequence = _alias(collections.abc.Sequence, T_co) -MutableSequence = _alias(collections.abc.MutableSequence, T) -ByteString = _alias(collections.abc.ByteString, ()) # Not generic -Tuple = _TupleType(tuple, (), inst=False, name='Tuple') +Mapping = _alias(collections.abc.Mapping, 2) +MutableMapping = _alias(collections.abc.MutableMapping, 2) +Sequence = _alias(collections.abc.Sequence, 1) +MutableSequence = _alias(collections.abc.MutableSequence, 1) +ByteString = _alias(collections.abc.ByteString, 0) # Not generic +# Tuple accepts variable number of parameters. +Tuple = _TupleType(tuple, -1, inst=False, name='Tuple') Tuple.__doc__ = \ """Tuple type; Tuple[X, Y] is the cross-product type of X and Y. @@ -1611,24 +1618,24 @@ class Other(Leaf): # Error reported by type checker To specify a variable-length tuple of homogeneous type, use Tuple[T, ...]. 
""" -List = _alias(list, T, inst=False, name='List') -Deque = _alias(collections.deque, T, name='Deque') -Set = _alias(set, T, inst=False, name='Set') -FrozenSet = _alias(frozenset, T_co, inst=False, name='FrozenSet') -MappingView = _alias(collections.abc.MappingView, T_co) -KeysView = _alias(collections.abc.KeysView, KT) -ItemsView = _alias(collections.abc.ItemsView, (KT, VT_co)) -ValuesView = _alias(collections.abc.ValuesView, VT_co) -ContextManager = _alias(contextlib.AbstractContextManager, T_co, name='ContextManager') -AsyncContextManager = _alias(contextlib.AbstractAsyncContextManager, T_co, name='AsyncContextManager') -Dict = _alias(dict, (KT, VT), inst=False, name='Dict') -DefaultDict = _alias(collections.defaultdict, (KT, VT), name='DefaultDict') -OrderedDict = _alias(collections.OrderedDict, (KT, VT)) -Counter = _alias(collections.Counter, T) -ChainMap = _alias(collections.ChainMap, (KT, VT)) -Generator = _alias(collections.abc.Generator, (T_co, T_contra, V_co)) -AsyncGenerator = _alias(collections.abc.AsyncGenerator, (T_co, T_contra)) -Type = _alias(type, CT_co, inst=False, name='Type') +List = _alias(list, 1, inst=False, name='List') +Deque = _alias(collections.deque, 1, name='Deque') +Set = _alias(set, 1, inst=False, name='Set') +FrozenSet = _alias(frozenset, 1, inst=False, name='FrozenSet') +MappingView = _alias(collections.abc.MappingView, 1) +KeysView = _alias(collections.abc.KeysView, 1) +ItemsView = _alias(collections.abc.ItemsView, 2) +ValuesView = _alias(collections.abc.ValuesView, 1) +ContextManager = _alias(contextlib.AbstractContextManager, 1, name='ContextManager') +AsyncContextManager = _alias(contextlib.AbstractAsyncContextManager, 1, name='AsyncContextManager') +Dict = _alias(dict, 2, inst=False, name='Dict') +DefaultDict = _alias(collections.defaultdict, 2, name='DefaultDict') +OrderedDict = _alias(collections.OrderedDict, 2) +Counter = _alias(collections.Counter, 1) +ChainMap = _alias(collections.ChainMap, 2) +Generator = 
_alias(collections.abc.Generator, 3) +AsyncGenerator = _alias(collections.abc.AsyncGenerator, 2) +Type = _alias(type, 1, inst=False, name='Type') Type.__doc__ = \ """A special construct usable to annotate class objects. @@ -2122,8 +2129,8 @@ class io: io.__name__ = __name__ + '.io' sys.modules[io.__name__] = io -Pattern = _alias(stdlib_re.Pattern, AnyStr) -Match = _alias(stdlib_re.Match, AnyStr) +Pattern = _alias(stdlib_re.Pattern, 1) +Match = _alias(stdlib_re.Match, 1) class re: """Wrapper namespace for re type aliases.""" diff --git a/Misc/NEWS.d/next/Library/2020-05-07-21-22-04.bpo-40397.PVWFAn.rst b/Misc/NEWS.d/next/Library/2020-05-07-21-22-04.bpo-40397.PVWFAn.rst new file mode 100644 index 0000000000000..46e806a2dc222 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-07-21-22-04.bpo-40397.PVWFAn.rst @@ -0,0 +1,2 @@ +Removed attributes ``__args__`` and ``__parameters__`` from special generic +aliases like ``typing.List`` (not subscripted). From webhook-mailer at python.org Sun May 10 05:05:34 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Sun, 10 May 2020 09:05:34 -0000 Subject: [Python-checkins] bpo-40549: Convert posixmodule.c to multiphase init (GH-19982) Message-ID: https://github.com/python/cpython/commit/1c2fa781560608aa4be50c748d4b3f403cfa5035 commit: 1c2fa781560608aa4be50c748d4b3f403cfa5035 branch: master author: Victor Stinner committer: GitHub date: 2020-05-10T11:05:29+02:00 summary: bpo-40549: Convert posixmodule.c to multiphase init (GH-19982) Convert posixmodule.c ("posix" or "nt" module) to the multiphase initialization (PEP 489). * Create the module using PyModuleDef_Init(). * Create ScandirIteratorType and DirEntryType with the new PyType_FromModuleAndSpec() (PEP 573) * Get the module state from ScandirIteratorType and DirEntryType with the new PyType_GetModule() (PEP 573) * Pass module to functions which access the module state. * convert_sched_param() gets a new module parameter. 
It is now called directly since Argument Clinic doesn't support passing the module to an argument converter callback. * Remove _posixstate_global macro. files: A Misc/NEWS.d/next/Library/2020-05-07-20-11-51.bpo-40549.6FiRSV.rst M Modules/clinic/posixmodule.c.h M Modules/posixmodule.c diff --git a/Misc/NEWS.d/next/Library/2020-05-07-20-11-51.bpo-40549.6FiRSV.rst b/Misc/NEWS.d/next/Library/2020-05-07-20-11-51.bpo-40549.6FiRSV.rst new file mode 100644 index 0000000000000..873ff49c1eb00 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-07-20-11-51.bpo-40549.6FiRSV.rst @@ -0,0 +1,2 @@ +Convert posixmodule.c ("posix" or "nt" module) to the multiphase +initialization (PEP 489). diff --git a/Modules/clinic/posixmodule.c.h b/Modules/clinic/posixmodule.c.h index a2b4566443b51..cf6d7449bac83 100644 --- a/Modules/clinic/posixmodule.c.h +++ b/Modules/clinic/posixmodule.c.h @@ -2886,7 +2886,7 @@ PyDoc_STRVAR(os_sched_setscheduler__doc__, static PyObject * os_sched_setscheduler_impl(PyObject *module, pid_t pid, int policy, - struct sched_param *param); + PyObject *param_obj); static PyObject * os_sched_setscheduler(PyObject *module, PyObject *const *args, Py_ssize_t nargs) @@ -2894,13 +2894,13 @@ os_sched_setscheduler(PyObject *module, PyObject *const *args, Py_ssize_t nargs) PyObject *return_value = NULL; pid_t pid; int policy; - struct sched_param param; + PyObject *param_obj; - if (!_PyArg_ParseStack(args, nargs, "" _Py_PARSE_PID "iO&:sched_setscheduler", - &pid, &policy, convert_sched_param, ¶m)) { + if (!_PyArg_ParseStack(args, nargs, "" _Py_PARSE_PID "iO:sched_setscheduler", + &pid, &policy, ¶m_obj)) { goto exit; } - return_value = os_sched_setscheduler_impl(module, pid, policy, ¶m); + return_value = os_sched_setscheduler_impl(module, pid, policy, param_obj); exit: return return_value; @@ -2957,21 +2957,20 @@ PyDoc_STRVAR(os_sched_setparam__doc__, {"sched_setparam", (PyCFunction)(void(*)(void))os_sched_setparam, METH_FASTCALL, os_sched_setparam__doc__}, static PyObject * 
-os_sched_setparam_impl(PyObject *module, pid_t pid, - struct sched_param *param); +os_sched_setparam_impl(PyObject *module, pid_t pid, PyObject *param_obj); static PyObject * os_sched_setparam(PyObject *module, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; pid_t pid; - struct sched_param param; + PyObject *param_obj; - if (!_PyArg_ParseStack(args, nargs, "" _Py_PARSE_PID "O&:sched_setparam", - &pid, convert_sched_param, ¶m)) { + if (!_PyArg_ParseStack(args, nargs, "" _Py_PARSE_PID "O:sched_setparam", + &pid, ¶m_obj)) { goto exit; } - return_value = os_sched_setparam_impl(module, pid, ¶m); + return_value = os_sched_setparam_impl(module, pid, param_obj); exit: return return_value; @@ -9418,4 +9417,4 @@ os_waitstatus_to_exitcode(PyObject *module, PyObject *const *args, Py_ssize_t na #ifndef OS_WAITSTATUS_TO_EXITCODE_METHODDEF #define OS_WAITSTATUS_TO_EXITCODE_METHODDEF #endif /* !defined(OS_WAITSTATUS_TO_EXITCODE_METHODDEF) */ -/*[clinic end generated code: output=ba73b68f1c435ff6 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=be90d3aba972098b input=a9049054013a1b77]*/ diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index 0163b0757aefa..60a60e9aed76b 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -834,7 +834,6 @@ typedef struct { PyObject *st_mode; } _posixstate; -static struct PyModuleDef posixmodule; static inline _posixstate* get_posix_state(PyObject *module) @@ -844,8 +843,6 @@ get_posix_state(PyObject *module) return (_posixstate *)state; } -#define _posixstate_global ((_posixstate *)PyModule_GetState(PyState_FindModule(&posixmodule))) - /* * A PyArg_ParseTuple "converter" function * that handles filesystem paths in the manner @@ -2156,7 +2153,7 @@ _posix_free(void *module) } static void -fill_time(PyObject *v, int index, time_t sec, unsigned long nsec) +fill_time(PyObject *module, PyObject *v, int index, time_t sec, unsigned long nsec) { PyObject *s = _PyLong_FromTime_t(sec); PyObject 
*ns_fractional = PyLong_FromUnsignedLong(nsec); @@ -2167,7 +2164,7 @@ fill_time(PyObject *v, int index, time_t sec, unsigned long nsec) if (!(s && ns_fractional)) goto exit; - s_in_ns = PyNumber_Multiply(s, _posixstate_global->billion); + s_in_ns = PyNumber_Multiply(s, get_posix_state(module)->billion); if (!s_in_ns) goto exit; @@ -2197,10 +2194,10 @@ fill_time(PyObject *v, int index, time_t sec, unsigned long nsec) /* pack a system stat C structure into the Python stat tuple (used by posix_stat() and posix_fstat()) */ static PyObject* -_pystat_fromstructstat(STRUCT_STAT *st) +_pystat_fromstructstat(PyObject *module, STRUCT_STAT *st) { unsigned long ansec, mnsec, cnsec; - PyObject *StatResultType = _posixstate_global->StatResultType; + PyObject *StatResultType = get_posix_state(module)->StatResultType; PyObject *v = PyStructSequence_New((PyTypeObject *)StatResultType); if (v == NULL) return NULL; @@ -2239,9 +2236,9 @@ _pystat_fromstructstat(STRUCT_STAT *st) #else ansec = mnsec = cnsec = 0; #endif - fill_time(v, 7, st->st_atime, ansec); - fill_time(v, 8, st->st_mtime, mnsec); - fill_time(v, 9, st->st_ctime, cnsec); + fill_time(module, v, 7, st->st_atime, ansec); + fill_time(module, v, 8, st->st_mtime, mnsec); + fill_time(module, v, 9, st->st_ctime, cnsec); #ifdef HAVE_STRUCT_STAT_ST_BLKSIZE PyStructSequence_SET_ITEM(v, ST_BLKSIZE_IDX, @@ -2303,7 +2300,7 @@ _pystat_fromstructstat(STRUCT_STAT *st) static PyObject * -posix_do_stat(const char *function_name, path_t *path, +posix_do_stat(PyObject *module, const char *function_name, path_t *path, int dir_fd, int follow_symlinks) { STRUCT_STAT st; @@ -2348,7 +2345,7 @@ posix_do_stat(const char *function_name, path_t *path, return path_error(path); } - return _pystat_fromstructstat(&st); + return _pystat_fromstructstat(module, &st); } /*[python input] @@ -2643,13 +2640,8 @@ class confstr_confname_converter(path_confname_converter): class sysconf_confname_converter(path_confname_converter): converter="conv_sysconf_confname" 
-class sched_param_converter(CConverter): - type = 'struct sched_param' - converter = 'convert_sched_param' - impl_by_reference = True; - [python start generated code]*/ -/*[python end generated code: output=da39a3ee5e6b4b0d input=418fce0e01144461]*/ +/*[python end generated code: output=da39a3ee5e6b4b0d input=f1c8ae8d744f6c8b]*/ /*[clinic input] @@ -2686,7 +2678,7 @@ static PyObject * os_stat_impl(PyObject *module, path_t *path, int dir_fd, int follow_symlinks) /*[clinic end generated code: output=7d4976e6f18a59c5 input=01d362ebcc06996b]*/ { - return posix_do_stat("stat", path, dir_fd, follow_symlinks); + return posix_do_stat(module, "stat", path, dir_fd, follow_symlinks); } @@ -2710,7 +2702,7 @@ os_lstat_impl(PyObject *module, path_t *path, int dir_fd) /*[clinic end generated code: output=ef82a5d35ce8ab37 input=0b7474765927b925]*/ { int follow_symlinks = 0; - return posix_do_stat("lstat", path, dir_fd, follow_symlinks); + return posix_do_stat(module, "lstat", path, dir_fd, follow_symlinks); } @@ -4852,11 +4844,11 @@ utime_default(utime_t *ut, const char *path) #endif static int -split_py_long_to_s_and_ns(PyObject *py_long, time_t *s, long *ns) +split_py_long_to_s_and_ns(PyObject *module, PyObject *py_long, time_t *s, long *ns) { int result = 0; PyObject *divmod; - divmod = PyNumber_Divmod(py_long, _posixstate_global->billion); + divmod = PyNumber_Divmod(py_long, get_posix_state(module)->billion); if (!divmod) goto exit; if (!PyTuple_Check(divmod) || PyTuple_GET_SIZE(divmod) != 2) { @@ -4968,9 +4960,9 @@ os_utime_impl(PyObject *module, path_t *path, PyObject *times, PyObject *ns, return NULL; } utime.now = 0; - if (!split_py_long_to_s_and_ns(PyTuple_GET_ITEM(ns, 0), + if (!split_py_long_to_s_and_ns(module, PyTuple_GET_ITEM(ns, 0), &utime.atime_s, &utime.atime_ns) || - !split_py_long_to_s_and_ns(PyTuple_GET_ITEM(ns, 1), + !split_py_long_to_s_and_ns(module, PyTuple_GET_ITEM(ns, 1), &utime.mtime_s, &utime.mtime_ns)) { return NULL; } @@ -5421,11 +5413,11 @@ enum 
posix_spawn_file_actions_identifier { #if defined(HAVE_SCHED_SETPARAM) || defined(HAVE_SCHED_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDPARAM) static int -convert_sched_param(PyObject *param, struct sched_param *res); +convert_sched_param(PyObject *module, PyObject *param, struct sched_param *res); #endif static int -parse_posix_spawn_flags(const char *func_name, PyObject *setpgroup, +parse_posix_spawn_flags(PyObject *module, const char *func_name, PyObject *setpgroup, int resetids, int setsid, PyObject *setsigmask, PyObject *setsigdef, PyObject *scheduler, posix_spawnattr_t *attrp) @@ -5495,11 +5487,15 @@ parse_posix_spawn_flags(const char *func_name, PyObject *setpgroup, if (scheduler) { #ifdef POSIX_SPAWN_SETSCHEDULER PyObject *py_schedpolicy; + PyObject *schedparam_obj; struct sched_param schedparam; - if (!PyArg_ParseTuple(scheduler, "OO&" + if (!PyArg_ParseTuple(scheduler, "OO" ";A scheduler tuple must have two elements", - &py_schedpolicy, convert_sched_param, &schedparam)) { + &py_schedpolicy, &schedparam_obj)) { + goto fail; + } + if (!convert_sched_param(module, schedparam_obj, &schedparam)) { goto fail; } if (py_schedpolicy != Py_None) { @@ -5728,7 +5724,7 @@ py_posix_spawn(int use_posix_spawnp, PyObject *module, path_t *path, PyObject *a file_actionsp = &file_actions_buf; } - if (parse_posix_spawn_flags(func_name, setpgroup, resetids, setsid, + if (parse_posix_spawn_flags(module, func_name, setpgroup, resetids, setsid, setsigmask, setsigdef, scheduler, &attr)) { goto exit; } @@ -6378,11 +6374,11 @@ static PyStructSequence_Desc sched_param_desc = { }; static int -convert_sched_param(PyObject *param, struct sched_param *res) +convert_sched_param(PyObject *module, PyObject *param, struct sched_param *res) { long priority; - if (!Py_IS_TYPE(param, (PyTypeObject *)_posixstate_global->SchedParamType)) { + if (!Py_IS_TYPE(param, (PyTypeObject *)get_posix_state(module)->SchedParamType)) { PyErr_SetString(PyExc_TypeError, 
"must have a sched_param object"); return 0; } @@ -6405,7 +6401,7 @@ os.sched_setscheduler pid: pid_t policy: int - param: sched_param + param as param_obj: object / Set the scheduling policy for the process identified by pid. @@ -6416,15 +6412,20 @@ param is an instance of sched_param. static PyObject * os_sched_setscheduler_impl(PyObject *module, pid_t pid, int policy, - struct sched_param *param) -/*[clinic end generated code: output=b0ac0a70d3b1d705 input=c581f9469a5327dd]*/ + PyObject *param_obj) +/*[clinic end generated code: output=cde27faa55dc993e input=73013d731bd8fbe9]*/ { + struct sched_param param; + if (!convert_sched_param(module, param_obj, ¶m)) { + return NULL; + } + /* ** sched_setscheduler() returns 0 in Linux, but the previous ** scheduling policy under Solaris/Illumos, and others. ** On error, -1 is returned in all Operating Systems. */ - if (sched_setscheduler(pid, policy, param) == -1) + if (sched_setscheduler(pid, policy, ¶m) == -1) return posix_error(); Py_RETURN_NONE; } @@ -6453,7 +6454,7 @@ os_sched_getparam_impl(PyObject *module, pid_t pid) if (sched_getparam(pid, ¶m)) return posix_error(); - PyObject *SchedParamType = _posixstate_global->SchedParamType; + PyObject *SchedParamType = get_posix_state(module)->SchedParamType; result = PyStructSequence_New((PyTypeObject *)SchedParamType); if (!result) return NULL; @@ -6470,7 +6471,7 @@ os_sched_getparam_impl(PyObject *module, pid_t pid) /*[clinic input] os.sched_setparam pid: pid_t - param: sched_param + param as param_obj: object / Set scheduling parameters for the process identified by pid. @@ -6480,11 +6481,15 @@ param should be an instance of sched_param. 
[clinic start generated code]*/ static PyObject * -os_sched_setparam_impl(PyObject *module, pid_t pid, - struct sched_param *param) -/*[clinic end generated code: output=8af013f78a32b591 input=6b8d6dfcecdc21bd]*/ +os_sched_setparam_impl(PyObject *module, pid_t pid, PyObject *param_obj) +/*[clinic end generated code: output=f19fe020a53741c1 input=27b98337c8b2dcc7]*/ { - if (sched_setparam(pid, param)) + struct sched_param param; + if (!convert_sched_param(module, param_obj, ¶m)) { + return NULL; + } + + if (sched_setparam(pid, ¶m)) return posix_error(); Py_RETURN_NONE; } @@ -7710,7 +7715,7 @@ os_setgroups(PyObject *module, PyObject *groups) #if defined(HAVE_WAIT3) || defined(HAVE_WAIT4) static PyObject * -wait_helper(pid_t pid, int status, struct rusage *ru) +wait_helper(PyObject *module, pid_t pid, int status, struct rusage *ru) { PyObject *result; PyObject *struct_rusage; @@ -7727,7 +7732,7 @@ wait_helper(pid_t pid, int status, struct rusage *ru) PyObject *m = PyImport_ImportModuleNoBlock("resource"); if (m == NULL) return NULL; - struct_rusage = PyObject_GetAttr(m, _posixstate_global->struct_rusage); + struct_rusage = PyObject_GetAttr(m, get_posix_state(module)->struct_rusage); Py_DECREF(m); if (struct_rusage == NULL) return NULL; @@ -7803,7 +7808,7 @@ os_wait3_impl(PyObject *module, int options) if (pid < 0) return (!async_err) ? posix_error() : NULL; - return wait_helper(pid, WAIT_STATUS_INT(status), &ru); + return wait_helper(module, pid, WAIT_STATUS_INT(status), &ru); } #endif /* HAVE_WAIT3 */ @@ -7840,7 +7845,7 @@ os_wait4_impl(PyObject *module, pid_t pid, int options) if (res < 0) return (!async_err) ? 
posix_error() : NULL; - return wait_helper(res, WAIT_STATUS_INT(status), &ru); + return wait_helper(module, res, WAIT_STATUS_INT(status), &ru); } #endif /* HAVE_WAIT4 */ @@ -8375,11 +8380,11 @@ static PyStructSequence_Desc times_result_desc = { #ifdef HAVE_TIMES static PyObject * -build_times_result(double user, double system, +build_times_result(PyObject *module, double user, double system, double children_user, double children_system, double elapsed) { - PyObject *TimesResultType = _posixstate_global->TimesResultType; + PyObject *TimesResultType = get_posix_state(module)->TimesResultType; PyObject *value = PyStructSequence_New((PyTypeObject *)TimesResultType); if (value == NULL) return NULL; @@ -8435,7 +8440,7 @@ os_times_impl(PyObject *module) 1e7 is one second in such units; 1e-7 the inverse. 429.4967296 is 2**32 / 1e7 or 2**32 * 1e-7. */ - return build_times_result( + return build_times_result(module, (double)(user.dwHighDateTime*429.4967296 + user.dwLowDateTime*1e-7), (double)(kernel.dwHighDateTime*429.4967296 + @@ -8454,7 +8459,7 @@ os_times_impl(PyObject *module) c = times(&t); if (c == (clock_t) -1) return posix_error(); - return build_times_result( + return build_times_result(module, (double)t.tms_utime / ticks_per_second, (double)t.tms_stime / ticks_per_second, (double)t.tms_cutime / ticks_per_second, @@ -9515,7 +9520,7 @@ os_fstat_impl(PyObject *module, int fd) #endif } - return _pystat_fromstructstat(&st); + return _pystat_fromstructstat(module, &st); } @@ -10601,8 +10606,8 @@ os_WSTOPSIG_impl(PyObject *module, int status) #include static PyObject* -_pystatvfs_fromstructstatvfs(struct statvfs st) { - PyObject *StatVFSResultType = _posixstate_global->StatVFSResultType; +_pystatvfs_fromstructstatvfs(PyObject *module, struct statvfs st) { + PyObject *StatVFSResultType = get_posix_state(module)->StatVFSResultType; PyObject *v = PyStructSequence_New((PyTypeObject *)StatVFSResultType); if (v == NULL) return NULL; @@ -10679,7 +10684,7 @@ 
os_fstatvfs_impl(PyObject *module, int fd) if (result != 0) return (!async_err) ? posix_error() : NULL; - return _pystatvfs_fromstructstatvfs(st); + return _pystatvfs_fromstructstatvfs(module, st); } #endif /* defined(HAVE_FSTATVFS) && defined(HAVE_SYS_STATVFS_H) */ @@ -10726,7 +10731,7 @@ os_statvfs_impl(PyObject *module, path_t *path) return path_error(path); } - return _pystatvfs_fromstructstatvfs(st); + return _pystatvfs_fromstructstatvfs(module, st); } #endif /* defined(HAVE_STATVFS) && defined(HAVE_SYS_STATVFS_H) */ @@ -12768,6 +12773,12 @@ os_DirEntry_is_symlink_impl(DirEntry *self) #endif } +static inline PyObject* +DirEntry_get_module(DirEntry *self) +{ + return PyType_GetModule(Py_TYPE(self)); +} + static PyObject * DirEntry_fetch_stat(DirEntry *self, int follow_symlinks) { @@ -12805,7 +12816,7 @@ DirEntry_fetch_stat(DirEntry *self, int follow_symlinks) if (result != 0) return path_object_error(self->path); - return _pystat_fromstructstat(&st); + return _pystat_fromstructstat(DirEntry_get_module(self), &st); } static PyObject * @@ -12813,7 +12824,8 @@ DirEntry_get_lstat(DirEntry *self) { if (!self->lstat) { #ifdef MS_WINDOWS - self->lstat = _pystat_fromstructstat(&self->win32_lstat); + self->lstat = _pystat_fromstructstat(DirEntry_get_module(self), + &self->win32_lstat); #else /* POSIX */ self->lstat = DirEntry_fetch_stat(self, 0); #endif @@ -12888,7 +12900,7 @@ DirEntry_test_mode(DirEntry *self, int follow_symlinks, unsigned short mode_bits } goto error; } - st_mode = PyObject_GetAttr(stat, _posixstate_global->st_mode); + st_mode = PyObject_GetAttr(stat, get_posix_state(DirEntry_get_module(self))->st_mode); if (!st_mode) goto error; @@ -13092,14 +13104,14 @@ join_path_filenameW(const wchar_t *path_wide, const wchar_t *filename) } static PyObject * -DirEntry_from_find_data(path_t *path, WIN32_FIND_DATAW *dataW) +DirEntry_from_find_data(PyObject *module, path_t *path, WIN32_FIND_DATAW *dataW) { DirEntry *entry; BY_HANDLE_FILE_INFORMATION file_info; ULONG 
reparse_tag; wchar_t *joined_path; - PyObject *DirEntryType = _posixstate_global->DirEntryType; + PyObject *DirEntryType = get_posix_state(module)->DirEntryType; entry = PyObject_New(DirEntry, (PyTypeObject *)DirEntryType); if (!entry) return NULL; @@ -13177,8 +13189,8 @@ join_path_filename(const char *path_narrow, const char* filename, Py_ssize_t fil } static PyObject * -DirEntry_from_posix_info(path_t *path, const char *name, Py_ssize_t name_len, - ino_t d_ino +DirEntry_from_posix_info(PyObject *module, path_t *path, const char *name, + Py_ssize_t name_len, ino_t d_ino #ifdef HAVE_DIRENT_D_TYPE , unsigned char d_type #endif @@ -13187,7 +13199,7 @@ DirEntry_from_posix_info(path_t *path, const char *name, Py_ssize_t name_len, DirEntry *entry; char *joined_path; - PyObject *DirEntryType = _posixstate_global->DirEntryType; + PyObject *DirEntryType = get_posix_state(module)->DirEntryType; entry = PyObject_New(DirEntry, (PyTypeObject *)DirEntryType); if (!entry) return NULL; @@ -13307,8 +13319,10 @@ ScandirIterator_iternext(ScandirIterator *iterator) /* Skip over . and .. */ if (wcscmp(file_data->cFileName, L".") != 0 && - wcscmp(file_data->cFileName, L"..") != 0) { - entry = DirEntry_from_find_data(&iterator->path, file_data); + wcscmp(file_data->cFileName, L"..") != 0) + { + PyObject *module = PyType_GetModule(Py_TYPE(iterator)); + entry = DirEntry_from_find_data(module, &iterator->path, file_data); if (!entry) break; return entry; @@ -13379,10 +13393,12 @@ ScandirIterator_iternext(ScandirIterator *iterator) is_dot = direntp->d_name[0] == '.' && (name_len == 1 || (direntp->d_name[1] == '.' 
&& name_len == 2)); if (!is_dot) { - entry = DirEntry_from_posix_info(&iterator->path, direntp->d_name, - name_len, direntp->d_ino + PyObject *module = PyType_GetModule(Py_TYPE(iterator)); + entry = DirEntry_from_posix_info(module, + &iterator->path, direntp->d_name, + name_len, direntp->d_ino #ifdef HAVE_DIRENT_D_TYPE - , direntp->d_type + , direntp->d_type #endif ); if (!entry) @@ -14632,19 +14648,6 @@ all_ins(PyObject *m) } -static struct PyModuleDef posixmodule = { - PyModuleDef_HEAD_INIT, - MODNAME, - posix__doc__, - sizeof(_posixstate), - posix_methods, - NULL, - _posix_traverse, - _posix_clear, - _posix_free, -}; - - static const char * const have_functions[] = { #ifdef HAVE_FACCESSAT @@ -14779,35 +14782,25 @@ static const char * const have_functions[] = { }; -PyMODINIT_FUNC -INITFUNC(void) +static int +posixmodule_exec(PyObject *m) { - PyObject *m, *v; + PyObject *v; PyObject *list; const char * const *trace; - m = PyState_FindModule(&posixmodule); - if (m != NULL) { - Py_INCREF(m); - return m; - } - - m = PyModule_Create(&posixmodule); - if (m == NULL) - return NULL; - /* Initialize environ dictionary */ v = convertenviron(); Py_XINCREF(v); if (v == NULL || PyModule_AddObject(m, "environ", v) != 0) - return NULL; + return -1; Py_DECREF(v); if (all_ins(m)) - return NULL; + return -1; if (setup_confname_tables(m)) - return NULL; + return -1; Py_INCREF(PyExc_OSError); PyModule_AddObject(m, "error", PyExc_OSError); @@ -14816,7 +14809,7 @@ INITFUNC(void) waitid_result_desc.name = MODNAME ".waitid_result"; PyObject *WaitidResultType = (PyObject *)PyStructSequence_NewType(&waitid_result_desc); if (WaitidResultType == NULL) { - return NULL; + return -1; } Py_INCREF(WaitidResultType); PyModule_AddObject(m, "waitid_result", WaitidResultType); @@ -14829,7 +14822,7 @@ INITFUNC(void) stat_result_desc.fields[9].name = PyStructSequence_UnnamedField; PyObject *StatResultType = (PyObject *)PyStructSequence_NewType(&stat_result_desc); if (StatResultType == NULL) { - return 
NULL; + return -1; } Py_INCREF(StatResultType); PyModule_AddObject(m, "stat_result", StatResultType); @@ -14840,7 +14833,7 @@ INITFUNC(void) statvfs_result_desc.name = "os.statvfs_result"; /* see issue #19209 */ PyObject *StatVFSResultType = (PyObject *)PyStructSequence_NewType(&statvfs_result_desc); if (StatVFSResultType == NULL) { - return NULL; + return -1; } Py_INCREF(StatVFSResultType); PyModule_AddObject(m, "statvfs_result", StatVFSResultType); @@ -14859,7 +14852,7 @@ INITFUNC(void) sched_param_desc.name = MODNAME ".sched_param"; PyObject *SchedParamType = (PyObject *)PyStructSequence_NewType(&sched_param_desc); if (SchedParamType == NULL) { - return NULL; + return -1; } Py_INCREF(SchedParamType); PyModule_AddObject(m, "sched_param", SchedParamType); @@ -14870,22 +14863,22 @@ INITFUNC(void) /* initialize TerminalSize_info */ PyObject *TerminalSizeType = (PyObject *)PyStructSequence_NewType(&TerminalSize_desc); if (TerminalSizeType == NULL) { - return NULL; + return -1; } Py_INCREF(TerminalSizeType); PyModule_AddObject(m, "terminal_size", TerminalSizeType); get_posix_state(m)->TerminalSizeType = TerminalSizeType; /* initialize scandir types */ - PyObject *ScandirIteratorType = PyType_FromSpec(&ScandirIteratorType_spec); + PyObject *ScandirIteratorType = PyType_FromModuleAndSpec(m, &ScandirIteratorType_spec, NULL); if (ScandirIteratorType == NULL) { - return NULL; + return -1; } get_posix_state(m)->ScandirIteratorType = ScandirIteratorType; - PyObject *DirEntryType = PyType_FromSpec(&DirEntryType_spec); + PyObject *DirEntryType = PyType_FromModuleAndSpec(m, &DirEntryType_spec, NULL); if (DirEntryType == NULL) { - return NULL; + return -1; } Py_INCREF(DirEntryType); PyModule_AddObject(m, "DirEntry", DirEntryType); @@ -14894,7 +14887,7 @@ INITFUNC(void) times_result_desc.name = MODNAME ".times_result"; PyObject *TimesResultType = (PyObject *)PyStructSequence_NewType(×_result_desc); if (TimesResultType == NULL) { - return NULL; + return -1; } 
Py_INCREF(TimesResultType); PyModule_AddObject(m, "times_result", TimesResultType); @@ -14902,7 +14895,7 @@ INITFUNC(void) PyTypeObject *UnameResultType = PyStructSequence_NewType(&uname_result_desc); if (UnameResultType == NULL) { - return NULL; + return -1; } Py_INCREF(UnameResultType); PyModule_AddObject(m, "uname_result", (PyObject *)UnameResultType); @@ -14922,7 +14915,7 @@ INITFUNC(void) #ifdef HAVE_FSTATVFS if (fstatvfs == NULL) { if (PyObject_DelAttrString(m, "fstatvfs") == -1) { - return NULL; + return -1; } } #endif /* HAVE_FSTATVFS */ @@ -14930,7 +14923,7 @@ INITFUNC(void) #ifdef HAVE_STATVFS if (statvfs == NULL) { if (PyObject_DelAttrString(m, "statvfs") == -1) { - return NULL; + return -1; } } #endif /* HAVE_STATVFS */ @@ -14938,7 +14931,7 @@ INITFUNC(void) # ifdef HAVE_LCHOWN if (lchown == NULL) { if (PyObject_DelAttrString(m, "lchown") == -1) { - return NULL; + return -1; } } #endif /* HAVE_LCHOWN */ @@ -14947,15 +14940,15 @@ INITFUNC(void) #endif /* __APPLE__ */ if ((get_posix_state(m)->billion = PyLong_FromLong(1000000000)) == NULL) - return NULL; + return -1; #if defined(HAVE_WAIT3) || defined(HAVE_WAIT4) get_posix_state(m)->struct_rusage = PyUnicode_InternFromString("struct_rusage"); if (get_posix_state(m)->struct_rusage == NULL) - return NULL; + return -1; #endif get_posix_state(m)->st_mode = PyUnicode_InternFromString("st_mode"); if (get_posix_state(m)->st_mode == NULL) - return NULL; + return -1; /* suppress "function not used" warnings */ { @@ -14973,18 +14966,42 @@ INITFUNC(void) */ list = PyList_New(0); if (!list) - return NULL; + return -1; for (trace = have_functions; *trace; trace++) { PyObject *unicode = PyUnicode_DecodeASCII(*trace, strlen(*trace), NULL); if (!unicode) - return NULL; + return -1; if (PyList_Append(list, unicode)) - return NULL; + return -1; Py_DECREF(unicode); } PyModule_AddObject(m, "_have_functions", list); - return m; + return 0; +} + + +static PyModuleDef_Slot posixmodile_slots[] = { + {Py_mod_exec, 
posixmodule_exec}, + {0, NULL} +}; + +static struct PyModuleDef posixmodule = { + PyModuleDef_HEAD_INIT, + .m_name = MODNAME, + .m_doc = posix__doc__, + .m_size = sizeof(_posixstate), + .m_methods = posix_methods, + .m_slots = posixmodile_slots, + .m_traverse = _posix_traverse, + .m_clear = _posix_clear, + .m_free = _posix_free, +}; + +PyMODINIT_FUNC +INITFUNC(void) +{ + return PyModuleDef_Init(&posixmodule); } #ifdef __cplusplus From webhook-mailer at python.org Sun May 10 05:16:02 2020 From: webhook-mailer at python.org (Sergey Fedoseev) Date: Sun, 10 May 2020 09:16:02 -0000 Subject: [Python-checkins] bpo-37986: Improve perfomance of PyLong_FromDouble() (GH-15611) Message-ID: https://github.com/python/cpython/commit/86a93fddf72a2711aca99afa0c5374c8d6b4a321 commit: 86a93fddf72a2711aca99afa0c5374c8d6b4a321 branch: master author: Sergey Fedoseev committer: GitHub date: 2020-05-10T10:15:57+01:00 summary: bpo-37986: Improve perfomance of PyLong_FromDouble() (GH-15611) * bpo-37986: Improve perfomance of PyLong_FromDouble() * Use strict bound check for safety and symmetry * Remove possibly outdated performance claims Co-authored-by: Mark Dickinson files: A Misc/NEWS.d/next/Core and Builtins/2019-11-20-09-50-58.bpo-37986.o0lmA7.rst M Objects/floatobject.c M Objects/longobject.c diff --git a/Misc/NEWS.d/next/Core and Builtins/2019-11-20-09-50-58.bpo-37986.o0lmA7.rst b/Misc/NEWS.d/next/Core and Builtins/2019-11-20-09-50-58.bpo-37986.o0lmA7.rst new file mode 100644 index 0000000000000..62446e35ae01b --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2019-11-20-09-50-58.bpo-37986.o0lmA7.rst @@ -0,0 +1,2 @@ +Improve performance of :c:func:`PyLong_FromDouble` for values that fit into +:c:type:`long`. 
diff --git a/Objects/floatobject.c b/Objects/floatobject.c index faa02f2f05795..9f5014092cf20 100644 --- a/Objects/floatobject.c +++ b/Objects/floatobject.c @@ -862,27 +862,7 @@ static PyObject * float___trunc___impl(PyObject *self) /*[clinic end generated code: output=dd3e289dd4c6b538 input=591b9ba0d650fdff]*/ { - double x = PyFloat_AsDouble(self); - double wholepart; /* integral portion of x, rounded toward 0 */ - - (void)modf(x, &wholepart); - /* Try to get out cheap if this fits in a Python int. The attempt - * to cast to long must be protected, as C doesn't define what - * happens if the double is too big to fit in a long. Some rare - * systems raise an exception then (RISCOS was mentioned as one, - * and someone using a non-default option on Sun also bumped into - * that). Note that checking for >= and <= LONG_{MIN,MAX} would - * still be vulnerable: if a long has more bits of precision than - * a double, casting MIN/MAX to double may yield an approximation, - * and if that's rounded up, then, e.g., wholepart=LONG_MAX+1 would - * yield true from the C expression wholepart<=LONG_MAX, despite - * that wholepart is actually greater than LONG_MAX. - */ - if (LONG_MIN < wholepart && wholepart < LONG_MAX) { - const long aslong = (long)wholepart; - return PyLong_FromLong(aslong); - } - return PyLong_FromDouble(wholepart); + return PyLong_FromDouble(PyFloat_AS_DOUBLE(self)); } /*[clinic input] diff --git a/Objects/longobject.c b/Objects/longobject.c index 11fc75b918f77..0ff0e80cd4269 100644 --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -416,6 +416,21 @@ PyLong_FromSize_t(size_t ival) PyObject * PyLong_FromDouble(double dval) { + /* Try to get out cheap if this fits in a long. When a finite value of real + * floating type is converted to an integer type, the value is truncated + * toward zero. If the value of the integral part cannot be represented by + * the integer type, the behavior is undefined. 
Thus, we must check that + * value is in range (LONG_MIN - 1, LONG_MAX + 1). If a long has more bits + * of precision than a double, casting LONG_MIN - 1 to double may yield an + * approximation, but LONG_MAX + 1 is a power of two and can be represented + * as double exactly (assuming FLT_RADIX is 2 or 16), so for simplicity + * check against [-(LONG_MAX + 1), LONG_MAX + 1). + */ + const double int_max = (unsigned long)LONG_MAX + 1; + if (-int_max < dval && dval < int_max) { + return PyLong_FromLong((long)dval); + } + PyLongObject *v; double frac; int i, ndig, expo, neg; @@ -435,8 +450,7 @@ PyLong_FromDouble(double dval) dval = -dval; } frac = frexp(dval, &expo); /* dval = frac*2**expo; 0.0 <= frac < 1.0 */ - if (expo <= 0) - return PyLong_FromLong(0L); + assert(expo > 0); ndig = (expo-1) / PyLong_SHIFT + 1; /* Number of 'digits' in result */ v = _PyLong_New(ndig); if (v == NULL) From webhook-mailer at python.org Sun May 10 06:39:52 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Sun, 10 May 2020 10:39:52 -0000 Subject: [Python-checkins] bpo-40397: Fix subscription of nested generic alias without parameters. (GH-20021) Message-ID: https://github.com/python/cpython/commit/0122d48681b1df27015cf396559fb283ba364d6d commit: 0122d48681b1df27015cf396559fb283ba364d6d branch: master author: Serhiy Storchaka committer: GitHub date: 2020-05-10T13:39:40+03:00 summary: bpo-40397: Fix subscription of nested generic alias without parameters. (GH-20021) files: M Lib/test/test_typing.py M Lib/typing.py diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py index 21bc7c81f2a30..f429e883b5953 100644 --- a/Lib/test/test_typing.py +++ b/Lib/test/test_typing.py @@ -10,7 +10,7 @@ from typing import TypeVar, AnyStr from typing import T, KT, VT # Not in __all__. 
from typing import Union, Optional, Literal -from typing import Tuple, List, MutableMapping +from typing import Tuple, List, Dict, MutableMapping from typing import Callable from typing import Generic, ClassVar, Final, final, Protocol from typing import cast, runtime_checkable @@ -3173,6 +3173,17 @@ def test_frozenset(self): def test_dict(self): self.assertIsSubclass(dict, typing.Dict) + def test_dict_subscribe(self): + K = TypeVar('K') + V = TypeVar('V') + self.assertEqual(Dict[K, V][str, int], Dict[str, int]) + self.assertEqual(Dict[K, int][str], Dict[str, int]) + self.assertEqual(Dict[str, V][int], Dict[str, int]) + self.assertEqual(Dict[K, List[V]][str, int], Dict[str, List[int]]) + self.assertEqual(Dict[K, List[int]][str], Dict[str, List[int]]) + self.assertEqual(Dict[K, list[V]][str, int], Dict[str, list[int]]) + self.assertEqual(Dict[K, list[int]][str], Dict[str, list[int]]) + def test_no_list_instantiation(self): with self.assertRaises(TypeError): typing.List() diff --git a/Lib/typing.py b/Lib/typing.py index e31fc99e02245..b5ba38e07c835 100644 --- a/Lib/typing.py +++ b/Lib/typing.py @@ -702,8 +702,10 @@ def __getitem__(self, params): if isinstance(arg, TypeVar): arg = subst[arg] elif isinstance(arg, (_GenericAlias, GenericAlias)): - subargs = tuple(subst[x] for x in arg.__parameters__) - arg = arg[subargs] + subparams = arg.__parameters__ + if subparams: + subargs = tuple(subst[x] for x in subparams) + arg = arg[subargs] new_args.append(arg) return self.copy_with(tuple(new_args)) From webhook-mailer at python.org Sun May 10 08:14:39 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Sun, 10 May 2020 12:14:39 -0000 Subject: [Python-checkins] bpo-40257: Tweak docstrings for special generic aliases. 
(GH-20022) Message-ID: https://github.com/python/cpython/commit/2fbc57af851814df567fb51054cb6f6a399f814a commit: 2fbc57af851814df567fb51054cb6f6a399f814a branch: master author: Serhiy Storchaka committer: GitHub date: 2020-05-10T15:14:27+03:00 summary: bpo-40257: Tweak docstrings for special generic aliases. (GH-20022) * Add the terminating period. * Omit module name for builtin types. files: M Lib/typing.py diff --git a/Lib/typing.py b/Lib/typing.py index b5ba38e07c835..f94996daebd6e 100644 --- a/Lib/typing.py +++ b/Lib/typing.py @@ -753,7 +753,10 @@ def __init__(self, origin, nparams, *, inst=True, name=None): name = origin.__name__ super().__init__(origin, inst=inst, name=name) self._nparams = nparams - self.__doc__ = f'A generic version of {origin.__module__}.{origin.__qualname__}' + if origin.__module__ == 'builtins': + self.__doc__ = f'A generic version of {origin.__qualname__}.' + else: + self.__doc__ = f'A generic version of {origin.__module__}.{origin.__qualname__}.' @_tp_cache def __getitem__(self, params): From webhook-mailer at python.org Sun May 10 17:53:44 2020 From: webhook-mailer at python.org (Raymond Hettinger) Date: Sun, 10 May 2020 21:53:44 -0000 Subject: [Python-checkins] Improve code clarity for the set lookup logic (GH-20028) Message-ID: https://github.com/python/cpython/commit/2cc9b8486dd924214f9e5657672fdeb24449d206 commit: 2cc9b8486dd924214f9e5657672fdeb24449d206 branch: master author: Raymond Hettinger committer: GitHub date: 2020-05-10T14:53:29-07:00 summary: Improve code clarity for the set lookup logic (GH-20028) files: M Objects/setobject.c diff --git a/Objects/setobject.c b/Objects/setobject.c index 0e4e45f60a9cc..76b1944db4558 100644 --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -57,77 +57,43 @@ set_lookkey(PySetObject *so, PyObject *key, Py_hash_t hash) { setentry *table; setentry *entry; - size_t perturb; + size_t perturb = hash; size_t mask = so->mask; size_t i = (size_t)hash & mask; /* Unsigned for defined overflow 
behavior */ - size_t j; + int probes; int cmp; - entry = &so->table[i]; - if (entry->key == NULL) - return entry; - - perturb = hash; - while (1) { - if (entry->hash == hash) { - PyObject *startkey = entry->key; - /* startkey cannot be a dummy because the dummy hash field is -1 */ - assert(startkey != dummy); - if (startkey == key) - return entry; - if (PyUnicode_CheckExact(startkey) - && PyUnicode_CheckExact(key) - && _PyUnicode_EQ(startkey, key)) - return entry; - table = so->table; - Py_INCREF(startkey); - cmp = PyObject_RichCompareBool(startkey, key, Py_EQ); - Py_DECREF(startkey); - if (cmp < 0) /* unlikely */ - return NULL; - if (table != so->table || entry->key != startkey) /* unlikely */ - return set_lookkey(so, key, hash); - if (cmp > 0) /* likely */ + entry = &so->table[i]; + probes = (i + LINEAR_PROBES <= mask) ? LINEAR_PROBES: 0; + do { + if (entry->hash == 0 && entry->key == NULL) return entry; - mask = so->mask; /* help avoid a register spill */ - } - - if (i + LINEAR_PROBES <= mask) { - for (j = 0 ; j < LINEAR_PROBES ; j++) { - entry++; - if (entry->hash == 0 && entry->key == NULL) + if (entry->hash == hash) { + PyObject *startkey = entry->key; + assert(startkey != dummy); + if (startkey == key) return entry; - if (entry->hash == hash) { - PyObject *startkey = entry->key; - assert(startkey != dummy); - if (startkey == key) - return entry; - if (PyUnicode_CheckExact(startkey) - && PyUnicode_CheckExact(key) - && _PyUnicode_EQ(startkey, key)) - return entry; - table = so->table; - Py_INCREF(startkey); - cmp = PyObject_RichCompareBool(startkey, key, Py_EQ); - Py_DECREF(startkey); - if (cmp < 0) - return NULL; - if (table != so->table || entry->key != startkey) - return set_lookkey(so, key, hash); - if (cmp > 0) - return entry; - mask = so->mask; - } + if (PyUnicode_CheckExact(startkey) + && PyUnicode_CheckExact(key) + && _PyUnicode_EQ(startkey, key)) + return entry; + table = so->table; + Py_INCREF(startkey); + cmp = PyObject_RichCompareBool(startkey, 
key, Py_EQ); + Py_DECREF(startkey); + if (cmp < 0) + return NULL; + if (table != so->table || entry->key != startkey) + return set_lookkey(so, key, hash); + if (cmp > 0) + return entry; + mask = so->mask; } - } - + entry++; + } while (probes--); perturb >>= PERTURB_SHIFT; i = (i * 5 + 1 + perturb) & mask; - - entry = &so->table[i]; - if (entry->key == NULL) - return entry; } } @@ -141,7 +107,7 @@ set_add_entry(PySetObject *so, PyObject *key, Py_hash_t hash) size_t perturb; size_t mask; size_t i; /* Unsigned for defined overflow behavior */ - size_t j; + int probes; int cmp; /* Pre-increment is necessary to prevent arbitrary code in the rich @@ -152,75 +118,39 @@ set_add_entry(PySetObject *so, PyObject *key, Py_hash_t hash) mask = so->mask; i = (size_t)hash & mask; - - entry = &so->table[i]; - if (entry->key == NULL) - goto found_unused; - perturb = hash; while (1) { - if (entry->hash == hash) { - PyObject *startkey = entry->key; - /* startkey cannot be a dummy because the dummy hash field is -1 */ - assert(startkey != dummy); - if (startkey == key) - goto found_active; - if (PyUnicode_CheckExact(startkey) - && PyUnicode_CheckExact(key) - && _PyUnicode_EQ(startkey, key)) - goto found_active; - table = so->table; - Py_INCREF(startkey); - cmp = PyObject_RichCompareBool(startkey, key, Py_EQ); - Py_DECREF(startkey); - if (cmp > 0) /* likely */ - goto found_active; - if (cmp < 0) - goto comparison_error; - /* Continuing the search from the current entry only makes - sense if the table and entry are unchanged; otherwise, - we have to restart from the beginning */ - if (table != so->table || entry->key != startkey) - goto restart; - mask = so->mask; /* help avoid a register spill */ - } - - if (i + LINEAR_PROBES <= mask) { - for (j = 0 ; j < LINEAR_PROBES ; j++) { - entry++; - if (entry->hash == 0 && entry->key == NULL) - goto found_unused; - if (entry->hash == hash) { - PyObject *startkey = entry->key; - assert(startkey != dummy); - if (startkey == key) - goto 
found_active; - if (PyUnicode_CheckExact(startkey) - && PyUnicode_CheckExact(key) - && _PyUnicode_EQ(startkey, key)) - goto found_active; - table = so->table; - Py_INCREF(startkey); - cmp = PyObject_RichCompareBool(startkey, key, Py_EQ); - Py_DECREF(startkey); - if (cmp > 0) - goto found_active; - if (cmp < 0) - goto comparison_error; - if (table != so->table || entry->key != startkey) - goto restart; - mask = so->mask; - } + entry = &so->table[i]; + probes = (i + LINEAR_PROBES <= mask) ? LINEAR_PROBES: 0; + do { + if (entry->hash == 0 && entry->key == NULL) + goto found_unused; + if (entry->hash == hash) { + PyObject *startkey = entry->key; + assert(startkey != dummy); + if (startkey == key) + goto found_active; + if (PyUnicode_CheckExact(startkey) + && PyUnicode_CheckExact(key) + && _PyUnicode_EQ(startkey, key)) + goto found_active; + table = so->table; + Py_INCREF(startkey); + cmp = PyObject_RichCompareBool(startkey, key, Py_EQ); + Py_DECREF(startkey); + if (cmp > 0) + goto found_active; + if (cmp < 0) + goto comparison_error; + if (table != so->table || entry->key != startkey) + goto restart; + mask = so->mask; } - } - + entry++; + } while (probes--); perturb >>= PERTURB_SHIFT; i = (i * 5 + 1 + perturb) & mask; - - entry = &so->table[i]; - if (entry->key == NULL) - goto found_unused; } found_unused: From webhook-mailer at python.org Sun May 10 20:41:37 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Mon, 11 May 2020 00:41:37 -0000 Subject: [Python-checkins] bpo-40585: Normalize errors messages in codeop when comparing them (GH-20030) Message-ID: https://github.com/python/cpython/commit/5b956ca42de37c761562e9c9aeb96a0e67606e33 commit: 5b956ca42de37c761562e9c9aeb96a0e67606e33 branch: master author: Pablo Galindo committer: GitHub date: 2020-05-11T01:41:26+01:00 summary: bpo-40585: Normalize errors messages in codeop when comparing them (GH-20030) With the new parser, the error message contains always the trailing newlines, causing the comparison of 
the repr of the error messages in codeop to fail. This commit makes the new parser mirror the old parser's behaviour regarding trailing newlines. files: A Misc/NEWS.d/next/Core and Builtins/2020-05-11-00-19-42.bpo-40585.yusknY.rst M Lib/test/test_codeop.py M Parser/pegen/pegen.c diff --git a/Lib/test/test_codeop.py b/Lib/test/test_codeop.py index 1f27830ae50b8..0c5e362feea0c 100644 --- a/Lib/test/test_codeop.py +++ b/Lib/test/test_codeop.py @@ -288,6 +288,15 @@ def test_invalid(self): ai("[i for i in range(10)] = (1, 2, 3)") + def test_invalid_exec(self): + ai = self.assertInvalid + ai("raise = 4", symbol="exec") + ai('def a-b', symbol='exec') + ai('await?', symbol='exec') + ai('=!=', symbol='exec') + ai('a await raise b', symbol='exec') + ai('a await raise b?+1', symbol='exec') + def test_filename(self): self.assertEqual(compile_command("a = 1\n", "abc").co_filename, compile("a = 1\n", "abc", 'single').co_filename) diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-11-00-19-42.bpo-40585.yusknY.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-11-00-19-42.bpo-40585.yusknY.rst new file mode 100644 index 0000000000000..7a9258ef0a938 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-11-00-19-42.bpo-40585.yusknY.rst @@ -0,0 +1,2 @@ +Fixed a bug when using :func:`codeop.compile_command` that was causing +exceptions to be swallowed with the new parser. 
Patch by Pablo Galindo diff --git a/Parser/pegen/pegen.c b/Parser/pegen/pegen.c index 06af53b3597f7..c80f08668b07d 100644 --- a/Parser/pegen/pegen.c +++ b/Parser/pegen/pegen.c @@ -310,6 +310,12 @@ get_error_line(char *buffer, int is_file) newline = strchr(buffer, '\n'); } + if (is_file) { + while (newline > buffer && newline[-1] == '\n') { + --newline; + } + } + if (newline) { return PyUnicode_DecodeUTF8(buffer, newline - buffer, "replace"); } From webhook-mailer at python.org Mon May 11 00:04:39 2020 From: webhook-mailer at python.org (scoder) Date: Mon, 11 May 2020 04:04:39 -0000 Subject: [Python-checkins] bpo-40575: Avoid unnecessary overhead in _PyDict_GetItemIdWithError() (GH-20018) Message-ID: https://github.com/python/cpython/commit/6067d4bc3ce5ff4cfa5b47ceecc84a3941bc031c commit: 6067d4bc3ce5ff4cfa5b47ceecc84a3941bc031c branch: master author: scoder committer: GitHub date: 2020-05-11T06:04:31+02:00 summary: bpo-40575: Avoid unnecessary overhead in _PyDict_GetItemIdWithError() (GH-20018) Avoid unnecessary overhead in _PyDict_GetItemIdWithError() by calling _PyDict_GetItem_KnownHash() instead of the more generic PyDict_GetItemWithError(), since we already know the hash of interned strings. 
files: M Objects/dictobject.c diff --git a/Objects/dictobject.c b/Objects/dictobject.c index fa35d16478f63..809a5ed778737 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -1492,7 +1492,9 @@ _PyDict_GetItemIdWithError(PyObject *dp, struct _Py_Identifier *key) kv = _PyUnicode_FromId(key); /* borrowed */ if (kv == NULL) return NULL; - return PyDict_GetItemWithError(dp, kv); + Py_hash_t hash = ((PyASCIIObject *) kv)->hash; + assert (hash != -1); /* interned strings have their hash value initialised */ + return _PyDict_GetItem_KnownHash(dp, kv, hash); } PyObject * From webhook-mailer at python.org Mon May 11 02:37:33 2020 From: webhook-mailer at python.org (Inada Naoki) Date: Mon, 11 May 2020 06:37:33 -0000 Subject: [Python-checkins] bpo-36346: array: Don't use deprecated APIs (GH-19653) Message-ID: https://github.com/python/cpython/commit/d5d9a718662e67e2b1ac7874dda9df2d8d71d415 commit: d5d9a718662e67e2b1ac7874dda9df2d8d71d415 branch: master author: Inada Naoki committer: GitHub date: 2020-05-11T15:37:25+09:00 summary: bpo-36346: array: Don't use deprecated APIs (GH-19653) * Py_UNICODE -> wchar_t * Py_UNICODE -> unicode in Argument Clinic * PyUnicode_AsUnicode -> PyUnicode_AsWideCharString * Don't use "u#" format. 
Co-authored-by: Victor Stinner files: M Doc/library/array.rst M Doc/whatsnew/3.9.rst M Modules/arraymodule.c M Modules/clinic/arraymodule.c.h diff --git a/Doc/library/array.rst b/Doc/library/array.rst index c9a9b1dabb2a7..78020738bf4f7 100644 --- a/Doc/library/array.rst +++ b/Doc/library/array.rst @@ -22,7 +22,7 @@ defined: +-----------+--------------------+-------------------+-----------------------+-------+ | ``'B'`` | unsigned char | int | 1 | | +-----------+--------------------+-------------------+-----------------------+-------+ -| ``'u'`` | Py_UNICODE | Unicode character | 2 | \(1) | +| ``'u'`` | wchar_t | Unicode character | 2 | \(1) | +-----------+--------------------+-------------------+-----------------------+-------+ | ``'h'`` | signed short | int | 2 | | +-----------+--------------------+-------------------+-----------------------+-------+ @@ -48,15 +48,16 @@ defined: Notes: (1) - The ``'u'`` type code corresponds to Python's obsolete unicode character - (:c:type:`Py_UNICODE` which is :c:type:`wchar_t`). Depending on the - platform, it can be 16 bits or 32 bits. + It can be 16 bits or 32 bits depending on the platform. - ``'u'`` will be removed together with the rest of the :c:type:`Py_UNICODE` - API. + .. versionchanged:: 3.9 + ``array('u')`` now uses ``wchar_t`` as C type instead of deprecated + ``Py_UNICODE``. This change doesn't affect to its behavior because + ``Py_UNICODE`` is alias of ``wchar_t`` since Python 3.3. .. deprecated-removed:: 3.3 4.0 + The actual representation of values is determined by the machine architecture (strictly speaking, by the C implementation). The actual size can be accessed through the :attr:`itemsize` attribute. diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index 11e577baa8fb5..c57d702dce867 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -786,6 +786,12 @@ Changes in the Python API ``PyCF_ALLOW_TOP_LEVEL_AWAIT`` was clashing with ``CO_FUTURE_DIVISION``. 
(Contributed by Batuhan Taskaya in :issue:`39562`) +* ``array('u')`` now uses ``wchar_t`` as C type instead of ``Py_UNICODE``. + This change doesn't affect to its behavior because ``Py_UNICODE`` is alias + of ``wchar_t`` since Python 3.3. + (Contributed by Inada Naoki in :issue:`34538`.) + + CPython bytecode changes ------------------------ diff --git a/Modules/arraymodule.c b/Modules/arraymodule.c index 4920ad7b82124..732703e481adc 100644 --- a/Modules/arraymodule.c +++ b/Modules/arraymodule.c @@ -235,24 +235,31 @@ BB_setitem(arrayobject *ap, Py_ssize_t i, PyObject *v) static PyObject * u_getitem(arrayobject *ap, Py_ssize_t i) { - return PyUnicode_FromOrdinal(((Py_UNICODE *) ap->ob_item)[i]); + return PyUnicode_FromOrdinal(((wchar_t *) ap->ob_item)[i]); } static int u_setitem(arrayobject *ap, Py_ssize_t i, PyObject *v) { - Py_UNICODE *p; - Py_ssize_t len; - - if (!PyArg_Parse(v, "u#;array item must be unicode character", &p, &len)) + PyObject *u; + if (!PyArg_Parse(v, "U;array item must be unicode character", &u)) { return -1; - if (len != 1) { + } + + Py_ssize_t len = PyUnicode_AsWideChar(u, NULL, 0); + if (len != 2) { PyErr_SetString(PyExc_TypeError, "array item must be unicode character"); return -1; } - if (i >= 0) - ((Py_UNICODE *)ap->ob_item)[i] = p[0]; + + wchar_t w; + len = PyUnicode_AsWideChar(u, &w, 1); + assert(len == 1); + + if (i >= 0) { + ((wchar_t *)ap->ob_item)[i] = w; + } return 0; } @@ -530,7 +537,7 @@ d_setitem(arrayobject *ap, Py_ssize_t i, PyObject *v) DEFINE_COMPAREITEMS(b, signed char) DEFINE_COMPAREITEMS(BB, unsigned char) -DEFINE_COMPAREITEMS(u, Py_UNICODE) +DEFINE_COMPAREITEMS(u, wchar_t) DEFINE_COMPAREITEMS(h, short) DEFINE_COMPAREITEMS(HH, unsigned short) DEFINE_COMPAREITEMS(i, int) @@ -548,7 +555,7 @@ DEFINE_COMPAREITEMS(QQ, unsigned long long) static const struct arraydescr descriptors[] = { {'b', 1, b_getitem, b_setitem, b_compareitems, "b", 1, 1}, {'B', 1, BB_getitem, BB_setitem, BB_compareitems, "B", 1, 0}, - {'u', 
sizeof(Py_UNICODE), u_getitem, u_setitem, u_compareitems, "u", 0, 0}, + {'u', sizeof(wchar_t), u_getitem, u_setitem, u_compareitems, "u", 0, 0}, {'h', sizeof(short), h_getitem, h_setitem, h_compareitems, "h", 1, 1}, {'H', sizeof(short), HH_getitem, HH_setitem, HH_compareitems, "H", 1, 0}, {'i', sizeof(int), i_getitem, i_setitem, i_compareitems, "i", 1, 1}, @@ -1660,7 +1667,7 @@ array_array_tobytes_impl(arrayobject *self) /*[clinic input] array.array.fromunicode - ustr: Py_UNICODE(zeroes=True) + ustr: unicode / Extends this array with data from the unicode string ustr. @@ -1671,25 +1678,28 @@ some other type. [clinic start generated code]*/ static PyObject * -array_array_fromunicode_impl(arrayobject *self, const Py_UNICODE *ustr, - Py_ssize_clean_t ustr_length) -/*[clinic end generated code: output=cf2f662908e2befc input=150f00566ffbca6e]*/ +array_array_fromunicode_impl(arrayobject *self, PyObject *ustr) +/*[clinic end generated code: output=24359f5e001a7f2b input=025db1fdade7a4ce]*/ { - char typecode; - - typecode = self->ob_descr->typecode; - if (typecode != 'u') { + if (self->ob_descr->typecode != 'u') { PyErr_SetString(PyExc_ValueError, "fromunicode() may only be called on " "unicode type arrays"); return NULL; } - if (ustr_length > 0) { + + Py_ssize_t ustr_length = PyUnicode_AsWideChar(ustr, NULL, 0); + assert(ustr_length > 0); + if (ustr_length > 1) { + ustr_length--; /* trim trailing NUL character */ Py_ssize_t old_size = Py_SIZE(self); - if (array_resize(self, old_size + ustr_length) == -1) + if (array_resize(self, old_size + ustr_length) == -1) { return NULL; - memcpy(self->ob_item + old_size * sizeof(Py_UNICODE), - ustr, ustr_length * sizeof(Py_UNICODE)); + } + + // must not fail + PyUnicode_AsWideChar( + ustr, ((wchar_t *)self->ob_item) + old_size, ustr_length); } Py_RETURN_NONE; @@ -1709,14 +1719,12 @@ static PyObject * array_array_tounicode_impl(arrayobject *self) /*[clinic end generated code: output=08e442378336e1ef input=127242eebe70b66d]*/ { - char 
typecode; - typecode = self->ob_descr->typecode; - if (typecode != 'u') { + if (self->ob_descr->typecode != 'u') { PyErr_SetString(PyExc_ValueError, "tounicode() may only be called on unicode type arrays"); return NULL; } - return PyUnicode_FromWideChar((Py_UNICODE *) self->ob_item, Py_SIZE(self)); + return PyUnicode_FromWideChar((wchar_t *) self->ob_item, Py_SIZE(self)); } /*[clinic input] @@ -2675,30 +2683,20 @@ array_new(PyTypeObject *type, PyObject *args, PyObject *kwds) Py_DECREF(v); } else if (initial != NULL && PyUnicode_Check(initial)) { - Py_UNICODE *ustr; Py_ssize_t n; - - ustr = PyUnicode_AsUnicode(initial); + wchar_t *ustr = PyUnicode_AsWideCharString(initial, &n); if (ustr == NULL) { - PyErr_NoMemory(); Py_DECREF(a); return NULL; } - n = PyUnicode_GET_DATA_SIZE(initial); if (n > 0) { arrayobject *self = (arrayobject *)a; - char *item = self->ob_item; - item = (char *)PyMem_Realloc(item, n); - if (item == NULL) { - PyErr_NoMemory(); - Py_DECREF(a); - return NULL; - } - self->ob_item = item; - Py_SET_SIZE(self, n / sizeof(Py_UNICODE)); - memcpy(item, ustr, n); - self->allocated = Py_SIZE(self); + // self->ob_item may be NULL but it is safe. 
+ PyMem_Free(self->ob_item); + self->ob_item = (char *)ustr; + Py_SET_SIZE(self, n); + self->allocated = n; } } else if (initial != NULL && array_Check(initial) && len > 0) { diff --git a/Modules/clinic/arraymodule.c.h b/Modules/clinic/arraymodule.c.h index e1f4b0397b9cb..b9245ca91d5fa 100644 --- a/Modules/clinic/arraymodule.c.h +++ b/Modules/clinic/arraymodule.c.h @@ -380,20 +380,23 @@ PyDoc_STRVAR(array_array_fromunicode__doc__, {"fromunicode", (PyCFunction)array_array_fromunicode, METH_O, array_array_fromunicode__doc__}, static PyObject * -array_array_fromunicode_impl(arrayobject *self, const Py_UNICODE *ustr, - Py_ssize_clean_t ustr_length); +array_array_fromunicode_impl(arrayobject *self, PyObject *ustr); static PyObject * array_array_fromunicode(arrayobject *self, PyObject *arg) { PyObject *return_value = NULL; - const Py_UNICODE *ustr; - Py_ssize_clean_t ustr_length; + PyObject *ustr; - if (!PyArg_Parse(arg, "u#:fromunicode", &ustr, &ustr_length)) { + if (!PyUnicode_Check(arg)) { + _PyArg_BadArgument("fromunicode", "argument", "str", arg); goto exit; } - return_value = array_array_fromunicode_impl(self, ustr, ustr_length); + if (PyUnicode_READY(arg) == -1) { + goto exit; + } + ustr = arg; + return_value = array_array_fromunicode_impl(self, ustr); exit: return return_value; @@ -531,4 +534,4 @@ PyDoc_STRVAR(array_arrayiterator___setstate____doc__, #define ARRAY_ARRAYITERATOR___SETSTATE___METHODDEF \ {"__setstate__", (PyCFunction)array_arrayiterator___setstate__, METH_O, array_arrayiterator___setstate____doc__}, -/*[clinic end generated code: output=f649fc0bc9f6b13a input=a9049054013a1b77]*/ +/*[clinic end generated code: output=9f70748dd3bc532f input=a9049054013a1b77]*/ From webhook-mailer at python.org Mon May 11 14:50:22 2020 From: webhook-mailer at python.org (Brad Solomon) Date: Mon, 11 May 2020 18:50:22 -0000 Subject: [Python-checkins] bpo-40561: Add docstrings for webbrowser open functions (GH-19999) Message-ID: 
https://github.com/python/cpython/commit/ef7973a981ff8f4687ef3fdb85a69fa15aa11fe5 commit: ef7973a981ff8f4687ef3fdb85a69fa15aa11fe5 branch: master author: Brad Solomon committer: GitHub date: 2020-05-11T14:50:11-04:00 summary: bpo-40561: Add docstrings for webbrowser open functions (GH-19999) Co-authored-by: Brad Solomon Co-authored-by: Terry Jan Reedy files: A Misc/NEWS.d/next/Documentation/2020-05-08-08-39-40.bpo-40561.ZMB_2i.rst M Lib/webbrowser.py diff --git a/Lib/webbrowser.py b/Lib/webbrowser.py index 1ef179a91a6f1..9c73bcfb44ae8 100755 --- a/Lib/webbrowser.py +++ b/Lib/webbrowser.py @@ -69,6 +69,14 @@ def get(using=None): # instead of "from webbrowser import *". def open(url, new=0, autoraise=True): + """Display url using the default browser. + + If possible, open url in a location determined by new. + - 0: the same browser window (the default). + - 1: a new browser window. + - 2: a new browser page ("tab"). + If possible, autoraise raises the window (the default) or not. + """ if _tryorder is None: with _lock: if _tryorder is None: @@ -80,9 +88,17 @@ def open(url, new=0, autoraise=True): return False def open_new(url): + """Open url in a new window of the default browser. + + If not possible, then open url in the only browser window. + """ return open(url, 1) def open_new_tab(url): + """Open url in a new page ("tab") of the default browser. + + If not possible, then the behavior becomes equivalent to open_new(). + """ return open(url, 2) diff --git a/Misc/NEWS.d/next/Documentation/2020-05-08-08-39-40.bpo-40561.ZMB_2i.rst b/Misc/NEWS.d/next/Documentation/2020-05-08-08-39-40.bpo-40561.ZMB_2i.rst new file mode 100644 index 0000000000000..bda24719b12cb --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2020-05-08-08-39-40.bpo-40561.ZMB_2i.rst @@ -0,0 +1 @@ +Provide docstrings for webbrowser open functions. 
From webhook-mailer at python.org Mon May 11 15:06:40 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 11 May 2020 19:06:40 -0000 Subject: [Python-checkins] bpo-40561: Add docstrings for webbrowser open functions (GH-19999) Message-ID: https://github.com/python/cpython/commit/61b49a00e755136586e991c971c47f38bb5e4d23 commit: 61b49a00e755136586e991c971c47f38bb5e4d23 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-11T12:06:32-07:00 summary: bpo-40561: Add docstrings for webbrowser open functions (GH-19999) Co-authored-by: Brad Solomon Co-authored-by: Terry Jan Reedy (cherry picked from commit ef7973a981ff8f4687ef3fdb85a69fa15aa11fe5) Co-authored-by: Brad Solomon files: A Misc/NEWS.d/next/Documentation/2020-05-08-08-39-40.bpo-40561.ZMB_2i.rst M Lib/webbrowser.py diff --git a/Lib/webbrowser.py b/Lib/webbrowser.py index e052e51cdec5a..34b86a505c246 100755 --- a/Lib/webbrowser.py +++ b/Lib/webbrowser.py @@ -69,6 +69,14 @@ def get(using=None): # instead of "from webbrowser import *". def open(url, new=0, autoraise=True): + """Display url using the default browser. + + If possible, open url in a location determined by new. + - 0: the same browser window (the default). + - 1: a new browser window. + - 2: a new browser page ("tab"). + If possible, autoraise raises the window (the default) or not. + """ if _tryorder is None: with _lock: if _tryorder is None: @@ -80,9 +88,17 @@ def open(url, new=0, autoraise=True): return False def open_new(url): + """Open url in a new window of the default browser. + + If not possible, then open url in the only browser window. + """ return open(url, 1) def open_new_tab(url): + """Open url in a new page ("tab") of the default browser. + + If not possible, then the behavior becomes equivalent to open_new(). 
+ """ return open(url, 2) diff --git a/Misc/NEWS.d/next/Documentation/2020-05-08-08-39-40.bpo-40561.ZMB_2i.rst b/Misc/NEWS.d/next/Documentation/2020-05-08-08-39-40.bpo-40561.ZMB_2i.rst new file mode 100644 index 0000000000000..bda24719b12cb --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2020-05-08-08-39-40.bpo-40561.ZMB_2i.rst @@ -0,0 +1 @@ +Provide docstrings for webbrowser open functions. From webhook-mailer at python.org Mon May 11 15:09:14 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 11 May 2020 19:09:14 -0000 Subject: [Python-checkins] bpo-40561: Add docstrings for webbrowser open functions (GH-19999) Message-ID: https://github.com/python/cpython/commit/a63c61168588937c482435d0432c753de4844c46 commit: a63c61168588937c482435d0432c753de4844c46 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-11T12:09:10-07:00 summary: bpo-40561: Add docstrings for webbrowser open functions (GH-19999) Co-authored-by: Brad Solomon Co-authored-by: Terry Jan Reedy (cherry picked from commit ef7973a981ff8f4687ef3fdb85a69fa15aa11fe5) Co-authored-by: Brad Solomon files: A Misc/NEWS.d/next/Documentation/2020-05-08-08-39-40.bpo-40561.ZMB_2i.rst M Lib/webbrowser.py diff --git a/Lib/webbrowser.py b/Lib/webbrowser.py index 1ef179a91a6f1..9c73bcfb44ae8 100755 --- a/Lib/webbrowser.py +++ b/Lib/webbrowser.py @@ -69,6 +69,14 @@ def get(using=None): # instead of "from webbrowser import *". def open(url, new=0, autoraise=True): + """Display url using the default browser. + + If possible, open url in a location determined by new. + - 0: the same browser window (the default). + - 1: a new browser window. + - 2: a new browser page ("tab"). + If possible, autoraise raises the window (the default) or not. 
+ """ if _tryorder is None: with _lock: if _tryorder is None: @@ -80,9 +88,17 @@ def open(url, new=0, autoraise=True): return False def open_new(url): + """Open url in a new window of the default browser. + + If not possible, then open url in the only browser window. + """ return open(url, 1) def open_new_tab(url): + """Open url in a new page ("tab") of the default browser. + + If not possible, then the behavior becomes equivalent to open_new(). + """ return open(url, 2) diff --git a/Misc/NEWS.d/next/Documentation/2020-05-08-08-39-40.bpo-40561.ZMB_2i.rst b/Misc/NEWS.d/next/Documentation/2020-05-08-08-39-40.bpo-40561.ZMB_2i.rst new file mode 100644 index 0000000000000..bda24719b12cb --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2020-05-08-08-39-40.bpo-40561.ZMB_2i.rst @@ -0,0 +1 @@ +Provide docstrings for webbrowser open functions. From webhook-mailer at python.org Mon May 11 17:39:22 2020 From: webhook-mailer at python.org (Hai Shi) Date: Mon, 11 May 2020 21:39:22 -0000 Subject: [Python-checkins] bpo-40584: Update PyType_FromModuleAndSpec() to process tp_vectorcall_offset (GH-20026) Message-ID: https://github.com/python/cpython/commit/86d69444e7cfe758212956df0be0ec7b8a4251a6 commit: 86d69444e7cfe758212956df0be0ec7b8a4251a6 branch: master author: Hai Shi committer: GitHub date: 2020-05-11T23:38:55+02:00 summary: bpo-40584: Update PyType_FromModuleAndSpec() to process tp_vectorcall_offset (GH-20026) files: M Doc/c-api/structures.rst M Doc/c-api/type.rst M Objects/typeobject.c diff --git a/Doc/c-api/structures.rst b/Doc/c-api/structures.rst index ea97e1e715561..634e971952e8e 100644 --- a/Doc/c-api/structures.rst +++ b/Doc/c-api/structures.rst @@ -424,9 +424,11 @@ Accessing attributes of extension types Heap allocated types (created using :c:func:`PyType_FromSpec` or similar), ``PyMemberDef`` may contain definitions for the special members - ``__dictoffset__`` and ``__weaklistoffset__``, corresponding to - :c:member:`~PyTypeObject.tp_dictoffset` and - 
:c:member:`~PyTypeObject.tp_weaklistoffset` in type objects. + ``__dictoffset__``, ``__weaklistoffset__`` and ``__vectorcalloffset__``, + corresponding to + :c:member:`~PyTypeObject.tp_dictoffset`, + :c:member:`~PyTypeObject.tp_weaklistoffset` and + :c:member:`~PyTypeObject.tp_vectorcall_offset` in type objects. These must be defined with ``T_PYSSIZET`` and ``READONLY``, for example:: static PyMemberDef spam_type_members[] = { diff --git a/Doc/c-api/type.rst b/Doc/c-api/type.rst index 7dd393f47f1b4..f387279d143ee 100644 --- a/Doc/c-api/type.rst +++ b/Doc/c-api/type.rst @@ -228,6 +228,7 @@ The following functions and structs are used to create * :c:member:`~PyTypeObject.tp_dictoffset` (see :ref:`PyMemberDef `) * :c:member:`~PyTypeObject.tp_vectorcall_offset` + (see :ref:`PyMemberDef `) * :c:member:`~PyBufferProcs.bf_getbuffer` * :c:member:`~PyBufferProcs.bf_releasebuffer` diff --git a/Objects/typeobject.c b/Objects/typeobject.c index 525f5ac5d5775..a36b4dcc46d21 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -2954,10 +2954,10 @@ PyType_FromModuleAndSpec(PyObject *module, PyType_Spec *spec, PyObject *bases) PyTypeObject *type, *base; const PyType_Slot *slot; - Py_ssize_t nmembers, weaklistoffset, dictoffset; + Py_ssize_t nmembers, weaklistoffset, dictoffset, vectorcalloffset; char *res_start; - nmembers = weaklistoffset = dictoffset = 0; + nmembers = weaklistoffset = dictoffset = vectorcalloffset = 0; for (slot = spec->slots; slot->slot; slot++) { if (slot->slot == Py_tp_members) { nmembers = 0; @@ -2975,6 +2975,12 @@ PyType_FromModuleAndSpec(PyObject *module, PyType_Spec *spec, PyObject *bases) assert(memb->flags == READONLY); dictoffset = memb->offset; } + if (strcmp(memb->name, "__vectorcalloffset__") == 0) { + // The PyMemberDef must be a Py_ssize_t and readonly + assert(memb->type == T_PYSSIZET); + assert(memb->flags == READONLY); + vectorcalloffset = memb->offset; + } } } } @@ -3123,6 +3129,10 @@ PyType_FromModuleAndSpec(PyObject *module, 
PyType_Spec *spec, PyObject *bases) type->tp_dealloc = subtype_dealloc; } + if (vectorcalloffset) { + type->tp_vectorcall_offset = vectorcalloffset; + } + if (PyType_Ready(type) < 0) goto fail; From webhook-mailer at python.org Mon May 11 17:54:17 2020 From: webhook-mailer at python.org (Shantanu) Date: Mon, 11 May 2020 21:54:17 -0000 Subject: [Python-checkins] bpo-40334: produce specialized errors for invalid del targets (GH-19911) Message-ID: https://github.com/python/cpython/commit/27c0d9b54abaa4112d5a317b8aa78b39ad60a808 commit: 27c0d9b54abaa4112d5a317b8aa78b39ad60a808 branch: master author: Shantanu committer: GitHub date: 2020-05-11T14:53:58-07:00 summary: bpo-40334: produce specialized errors for invalid del targets (GH-19911) files: M Grammar/python.gram M Lib/test/test_grammar.py M Lib/test/test_syntax.py M Parser/pegen/parse.c diff --git a/Grammar/python.gram b/Grammar/python.gram index 574e1e1421644..0542107cac3e6 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -583,15 +583,19 @@ ann_assign_subscript_attribute_target[expr_ty]: | a=t_primary '[' b=slices ']' !t_lookahead { _Py_Subscript(a, b, Store, EXTRA) } del_targets[asdl_seq*]: a=','.del_target+ [','] { a } +# The lookaheads to del_target_end ensure that we don't match expressions where a prefix of the +# expression matches our rule, thereby letting these cases fall through to invalid_del_target. del_target[expr_ty] (memo): - | a=t_primary '.' b=NAME !t_lookahead { _Py_Attribute(a, b->v.Name.id, Del, EXTRA) } - | a=t_primary '[' b=slices ']' !t_lookahead { _Py_Subscript(a, b, Del, EXTRA) } + | a=t_primary '.' 
b=NAME &del_target_end { _Py_Attribute(a, b->v.Name.id, Del, EXTRA) } + | a=t_primary '[' b=slices ']' &del_target_end { _Py_Subscript(a, b, Del, EXTRA) } | del_t_atom del_t_atom[expr_ty]: - | a=NAME { _PyPegen_set_expr_context(p, a, Del) } + | a=NAME &del_target_end { _PyPegen_set_expr_context(p, a, Del) } | '(' a=del_target ')' { _PyPegen_set_expr_context(p, a, Del) } | '(' a=[del_targets] ')' { _Py_Tuple(a, Del, EXTRA) } | '[' a=[del_targets] ']' { _Py_List(a, Del, EXTRA) } + | invalid_del_target +del_target_end: ')' | ']' | ',' | ';' | NEWLINE targets[asdl_seq*]: a=','.target+ [','] { a } target[expr_ty] (memo): @@ -649,3 +653,6 @@ invalid_lambda_star_etc: invalid_double_type_comments: | TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT { RAISE_SYNTAX_ERROR("Cannot have two type comments on def") } +invalid_del_target: + | a=star_expression &del_target_end { + RAISE_SYNTAX_ERROR("cannot delete %s", _PyPegen_get_expr_name(a)) } diff --git a/Lib/test/test_grammar.py b/Lib/test/test_grammar.py index c24d3529490be..02ba8a8b1579a 100644 --- a/Lib/test/test_grammar.py +++ b/Lib/test/test_grammar.py @@ -801,6 +801,23 @@ def test_del_stmt(self): del abc del x, y, (z, xyz) + x, y, z = "xyz" + del x + del y, + del (z) + del () + + a, b, c, d, e, f, g = "abcdefg" + del a, (b, c), (d, (e, f)) + + a, b, c, d, e, f, g = "abcdefg" + del a, [b, c], (d, [e, f]) + + abcd = list("abcd") + del abcd[1:2] + + compile("del a, (b[0].c, (d.e, f.g[1:2])), [h.i.j], ()", "", "exec") + def test_pass_stmt(self): # 'pass' pass diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py index 0c0fc48e0d3de..06636ae8a149a 100644 --- a/Lib/test/test_syntax.py +++ b/Lib/test/test_syntax.py @@ -63,10 +63,9 @@ Traceback (most recent call last): SyntaxError: cannot assign to function call -# Pegen does not support this yet -# >>> del f() -# Traceback (most recent call last): -# SyntaxError: cannot delete function call +>>> del f() +Traceback (most recent call last): +SyntaxError: cannot delete 
function call >>> a + 1 = 2 Traceback (most recent call last): @@ -665,7 +664,7 @@ def _check_error(self, code, errtext, self.fail("SyntaxError is not a %s" % subclass.__name__) mo = re.search(errtext, str(err)) if mo is None: - self.fail("SyntaxError did not contain '%r'" % (errtext,)) + self.fail("SyntaxError did not contain %r" % (errtext,)) self.assertEqual(err.filename, filename) if lineno is not None: self.assertEqual(err.lineno, lineno) @@ -677,10 +676,36 @@ def _check_error(self, code, errtext, def test_assign_call(self): self._check_error("f() = 1", "assign") - @support.skip_if_new_parser("Pegen does not produce a specialized error " - "message yet") def test_assign_del(self): - self._check_error("del f()", "delete") + self._check_error("del (,)", "invalid syntax") + self._check_error("del 1", "delete literal") + self._check_error("del (1, 2)", "delete literal") + self._check_error("del None", "delete None") + self._check_error("del *x", "delete starred") + self._check_error("del (*x)", "delete starred") + self._check_error("del (*x,)", "delete starred") + self._check_error("del [*x,]", "delete starred") + self._check_error("del f()", "delete function call") + self._check_error("del f(a, b)", "delete function call") + self._check_error("del o.f()", "delete function call") + self._check_error("del a[0]()", "delete function call") + self._check_error("del x, f()", "delete function call") + self._check_error("del f(), x", "delete function call") + self._check_error("del [a, b, ((c), (d,), e.f())]", "delete function call") + self._check_error("del (a if True else b)", "delete conditional") + self._check_error("del +a", "delete operator") + self._check_error("del a, +b", "delete operator") + self._check_error("del a + b", "delete operator") + self._check_error("del (a + b, c)", "delete operator") + self._check_error("del (c[0], a + b)", "delete operator") + self._check_error("del a.b.c + 2", "delete operator") + self._check_error("del a.b.c[0] + 2", "delete 
operator") + self._check_error("del (a, b, (c, d.e.f + 2))", "delete operator") + self._check_error("del [a, b, (c, d.e.f[0] + 2)]", "delete operator") + self._check_error("del (a := 5)", "delete named expression") + # We don't have a special message for this, but make sure we don't + # report "cannot delete name" + self._check_error("del a += b", "invalid syntax") def test_global_param_err_first(self): source = """if 1: diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index 27feda73d99e1..a1a6f4c06bf63 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -204,168 +204,170 @@ static KeywordToken *reserved_keywords[] = { #define del_targets_type 1133 #define del_target_type 1134 #define del_t_atom_type 1135 -#define targets_type 1136 -#define target_type 1137 -#define t_primary_type 1138 // Left-recursive -#define t_lookahead_type 1139 -#define t_atom_type 1140 -#define incorrect_arguments_type 1141 -#define invalid_kwarg_type 1142 -#define invalid_named_expression_type 1143 -#define invalid_assignment_type 1144 -#define invalid_block_type 1145 -#define invalid_comprehension_type 1146 -#define invalid_parameters_type 1147 -#define invalid_star_etc_type 1148 -#define invalid_lambda_star_etc_type 1149 -#define invalid_double_type_comments_type 1150 -#define _loop0_1_type 1151 -#define _loop0_2_type 1152 -#define _loop0_4_type 1153 -#define _gather_3_type 1154 -#define _loop0_6_type 1155 -#define _gather_5_type 1156 -#define _loop0_8_type 1157 -#define _gather_7_type 1158 -#define _loop0_10_type 1159 -#define _gather_9_type 1160 -#define _loop1_11_type 1161 -#define _loop0_13_type 1162 -#define _gather_12_type 1163 -#define _tmp_14_type 1164 -#define _tmp_15_type 1165 -#define _tmp_16_type 1166 -#define _tmp_17_type 1167 -#define _tmp_18_type 1168 -#define _tmp_19_type 1169 -#define _tmp_20_type 1170 -#define _tmp_21_type 1171 -#define _loop1_22_type 1172 -#define _tmp_23_type 1173 -#define _tmp_24_type 1174 -#define _loop0_26_type 1175 -#define 
_gather_25_type 1176 -#define _loop0_28_type 1177 -#define _gather_27_type 1178 -#define _tmp_29_type 1179 -#define _loop0_30_type 1180 -#define _loop1_31_type 1181 -#define _loop0_33_type 1182 -#define _gather_32_type 1183 -#define _tmp_34_type 1184 -#define _loop0_36_type 1185 -#define _gather_35_type 1186 -#define _tmp_37_type 1187 -#define _loop0_39_type 1188 -#define _gather_38_type 1189 -#define _loop0_41_type 1190 -#define _gather_40_type 1191 -#define _loop0_43_type 1192 -#define _gather_42_type 1193 -#define _loop0_45_type 1194 -#define _gather_44_type 1195 -#define _tmp_46_type 1196 -#define _loop1_47_type 1197 -#define _tmp_48_type 1198 -#define _tmp_49_type 1199 -#define _tmp_50_type 1200 -#define _tmp_51_type 1201 -#define _tmp_52_type 1202 -#define _loop0_53_type 1203 -#define _loop0_54_type 1204 -#define _loop0_55_type 1205 -#define _loop1_56_type 1206 -#define _loop0_57_type 1207 -#define _loop1_58_type 1208 -#define _loop1_59_type 1209 -#define _loop1_60_type 1210 -#define _loop0_61_type 1211 -#define _loop1_62_type 1212 -#define _loop0_63_type 1213 -#define _loop1_64_type 1214 -#define _loop0_65_type 1215 -#define _loop1_66_type 1216 -#define _loop1_67_type 1217 -#define _tmp_68_type 1218 -#define _loop0_70_type 1219 -#define _gather_69_type 1220 -#define _loop1_71_type 1221 -#define _loop0_73_type 1222 -#define _gather_72_type 1223 -#define _loop1_74_type 1224 -#define _loop0_75_type 1225 -#define _loop0_76_type 1226 -#define _loop0_77_type 1227 -#define _loop1_78_type 1228 -#define _loop0_79_type 1229 -#define _loop1_80_type 1230 -#define _loop1_81_type 1231 -#define _loop1_82_type 1232 -#define _loop0_83_type 1233 -#define _loop1_84_type 1234 -#define _loop0_85_type 1235 -#define _loop1_86_type 1236 -#define _loop0_87_type 1237 -#define _loop1_88_type 1238 -#define _loop1_89_type 1239 -#define _loop1_90_type 1240 -#define _loop1_91_type 1241 -#define _tmp_92_type 1242 -#define _loop0_94_type 1243 -#define _gather_93_type 1244 -#define 
_tmp_95_type 1245 -#define _tmp_96_type 1246 -#define _tmp_97_type 1247 -#define _tmp_98_type 1248 -#define _loop1_99_type 1249 -#define _tmp_100_type 1250 -#define _tmp_101_type 1251 -#define _loop0_103_type 1252 -#define _gather_102_type 1253 -#define _loop1_104_type 1254 -#define _loop0_105_type 1255 -#define _loop0_106_type 1256 -#define _tmp_107_type 1257 -#define _tmp_108_type 1258 -#define _loop0_110_type 1259 -#define _gather_109_type 1260 -#define _loop0_112_type 1261 -#define _gather_111_type 1262 -#define _loop0_114_type 1263 -#define _gather_113_type 1264 -#define _loop0_116_type 1265 -#define _gather_115_type 1266 -#define _loop0_117_type 1267 -#define _loop0_119_type 1268 -#define _gather_118_type 1269 -#define _tmp_120_type 1270 -#define _loop0_122_type 1271 -#define _gather_121_type 1272 -#define _loop0_124_type 1273 -#define _gather_123_type 1274 -#define _tmp_125_type 1275 -#define _tmp_126_type 1276 -#define _tmp_127_type 1277 -#define _tmp_128_type 1278 -#define _tmp_129_type 1279 -#define _loop0_130_type 1280 -#define _tmp_131_type 1281 -#define _tmp_132_type 1282 -#define _tmp_133_type 1283 -#define _tmp_134_type 1284 -#define _tmp_135_type 1285 -#define _tmp_136_type 1286 -#define _tmp_137_type 1287 -#define _tmp_138_type 1288 -#define _tmp_139_type 1289 -#define _tmp_140_type 1290 -#define _tmp_141_type 1291 -#define _tmp_142_type 1292 -#define _tmp_143_type 1293 -#define _tmp_144_type 1294 -#define _loop1_145_type 1295 -#define _tmp_146_type 1296 -#define _tmp_147_type 1297 +#define del_target_end_type 1136 +#define targets_type 1137 +#define target_type 1138 +#define t_primary_type 1139 // Left-recursive +#define t_lookahead_type 1140 +#define t_atom_type 1141 +#define incorrect_arguments_type 1142 +#define invalid_kwarg_type 1143 +#define invalid_named_expression_type 1144 +#define invalid_assignment_type 1145 +#define invalid_block_type 1146 +#define invalid_comprehension_type 1147 +#define invalid_parameters_type 1148 +#define 
invalid_star_etc_type 1149 +#define invalid_lambda_star_etc_type 1150 +#define invalid_double_type_comments_type 1151 +#define invalid_del_target_type 1152 +#define _loop0_1_type 1153 +#define _loop0_2_type 1154 +#define _loop0_4_type 1155 +#define _gather_3_type 1156 +#define _loop0_6_type 1157 +#define _gather_5_type 1158 +#define _loop0_8_type 1159 +#define _gather_7_type 1160 +#define _loop0_10_type 1161 +#define _gather_9_type 1162 +#define _loop1_11_type 1163 +#define _loop0_13_type 1164 +#define _gather_12_type 1165 +#define _tmp_14_type 1166 +#define _tmp_15_type 1167 +#define _tmp_16_type 1168 +#define _tmp_17_type 1169 +#define _tmp_18_type 1170 +#define _tmp_19_type 1171 +#define _tmp_20_type 1172 +#define _tmp_21_type 1173 +#define _loop1_22_type 1174 +#define _tmp_23_type 1175 +#define _tmp_24_type 1176 +#define _loop0_26_type 1177 +#define _gather_25_type 1178 +#define _loop0_28_type 1179 +#define _gather_27_type 1180 +#define _tmp_29_type 1181 +#define _loop0_30_type 1182 +#define _loop1_31_type 1183 +#define _loop0_33_type 1184 +#define _gather_32_type 1185 +#define _tmp_34_type 1186 +#define _loop0_36_type 1187 +#define _gather_35_type 1188 +#define _tmp_37_type 1189 +#define _loop0_39_type 1190 +#define _gather_38_type 1191 +#define _loop0_41_type 1192 +#define _gather_40_type 1193 +#define _loop0_43_type 1194 +#define _gather_42_type 1195 +#define _loop0_45_type 1196 +#define _gather_44_type 1197 +#define _tmp_46_type 1198 +#define _loop1_47_type 1199 +#define _tmp_48_type 1200 +#define _tmp_49_type 1201 +#define _tmp_50_type 1202 +#define _tmp_51_type 1203 +#define _tmp_52_type 1204 +#define _loop0_53_type 1205 +#define _loop0_54_type 1206 +#define _loop0_55_type 1207 +#define _loop1_56_type 1208 +#define _loop0_57_type 1209 +#define _loop1_58_type 1210 +#define _loop1_59_type 1211 +#define _loop1_60_type 1212 +#define _loop0_61_type 1213 +#define _loop1_62_type 1214 +#define _loop0_63_type 1215 +#define _loop1_64_type 1216 +#define 
_loop0_65_type 1217 +#define _loop1_66_type 1218 +#define _loop1_67_type 1219 +#define _tmp_68_type 1220 +#define _loop0_70_type 1221 +#define _gather_69_type 1222 +#define _loop1_71_type 1223 +#define _loop0_73_type 1224 +#define _gather_72_type 1225 +#define _loop1_74_type 1226 +#define _loop0_75_type 1227 +#define _loop0_76_type 1228 +#define _loop0_77_type 1229 +#define _loop1_78_type 1230 +#define _loop0_79_type 1231 +#define _loop1_80_type 1232 +#define _loop1_81_type 1233 +#define _loop1_82_type 1234 +#define _loop0_83_type 1235 +#define _loop1_84_type 1236 +#define _loop0_85_type 1237 +#define _loop1_86_type 1238 +#define _loop0_87_type 1239 +#define _loop1_88_type 1240 +#define _loop1_89_type 1241 +#define _loop1_90_type 1242 +#define _loop1_91_type 1243 +#define _tmp_92_type 1244 +#define _loop0_94_type 1245 +#define _gather_93_type 1246 +#define _tmp_95_type 1247 +#define _tmp_96_type 1248 +#define _tmp_97_type 1249 +#define _tmp_98_type 1250 +#define _loop1_99_type 1251 +#define _tmp_100_type 1252 +#define _tmp_101_type 1253 +#define _loop0_103_type 1254 +#define _gather_102_type 1255 +#define _loop1_104_type 1256 +#define _loop0_105_type 1257 +#define _loop0_106_type 1258 +#define _tmp_107_type 1259 +#define _tmp_108_type 1260 +#define _loop0_110_type 1261 +#define _gather_109_type 1262 +#define _loop0_112_type 1263 +#define _gather_111_type 1264 +#define _loop0_114_type 1265 +#define _gather_113_type 1266 +#define _loop0_116_type 1267 +#define _gather_115_type 1268 +#define _loop0_117_type 1269 +#define _loop0_119_type 1270 +#define _gather_118_type 1271 +#define _tmp_120_type 1272 +#define _loop0_122_type 1273 +#define _gather_121_type 1274 +#define _loop0_124_type 1275 +#define _gather_123_type 1276 +#define _tmp_125_type 1277 +#define _tmp_126_type 1278 +#define _tmp_127_type 1279 +#define _tmp_128_type 1280 +#define _tmp_129_type 1281 +#define _loop0_130_type 1282 +#define _tmp_131_type 1283 +#define _tmp_132_type 1284 +#define _tmp_133_type 1285 
+#define _tmp_134_type 1286 +#define _tmp_135_type 1287 +#define _tmp_136_type 1288 +#define _tmp_137_type 1289 +#define _tmp_138_type 1290 +#define _tmp_139_type 1291 +#define _tmp_140_type 1292 +#define _tmp_141_type 1293 +#define _tmp_142_type 1294 +#define _tmp_143_type 1295 +#define _tmp_144_type 1296 +#define _loop1_145_type 1297 +#define _tmp_146_type 1298 +#define _tmp_147_type 1299 static mod_ty file_rule(Parser *p); static mod_ty interactive_rule(Parser *p); @@ -503,6 +505,7 @@ static expr_ty ann_assign_subscript_attribute_target_rule(Parser *p); static asdl_seq* del_targets_rule(Parser *p); static expr_ty del_target_rule(Parser *p); static expr_ty del_t_atom_rule(Parser *p); +static void *del_target_end_rule(Parser *p); static asdl_seq* targets_rule(Parser *p); static expr_ty target_rule(Parser *p); static expr_ty t_primary_rule(Parser *p); @@ -518,6 +521,7 @@ static void *invalid_parameters_rule(Parser *p); static void *invalid_star_etc_rule(Parser *p); static void *invalid_lambda_star_etc_rule(Parser *p); static void *invalid_double_type_comments_rule(Parser *p); +static void *invalid_del_target_rule(Parser *p); static asdl_seq *_loop0_1_rule(Parser *p); static asdl_seq *_loop0_2_rule(Parser *p); static asdl_seq *_loop0_4_rule(Parser *p); @@ -9786,8 +9790,8 @@ del_targets_rule(Parser *p) } // del_target: -// | t_primary '.' NAME !t_lookahead -// | t_primary '[' slices ']' !t_lookahead +// | t_primary '.' NAME &del_target_end +// | t_primary '[' slices ']' &del_target_end // | del_t_atom static expr_ty del_target_rule(Parser *p) @@ -9807,7 +9811,7 @@ del_target_rule(Parser *p) UNUSED(_start_lineno); // Only used by EXTRA macro int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro - { // t_primary '.' NAME !t_lookahead + { // t_primary '.' 
NAME &del_target_end Token * _literal; expr_ty a; expr_ty b; @@ -9818,7 +9822,7 @@ del_target_rule(Parser *p) && (b = _PyPegen_name_token(p)) // NAME && - _PyPegen_lookahead(0, t_lookahead_rule, p) + _PyPegen_lookahead(1, del_target_end_rule, p) ) { Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -9838,7 +9842,7 @@ del_target_rule(Parser *p) } p->mark = _mark; } - { // t_primary '[' slices ']' !t_lookahead + { // t_primary '[' slices ']' &del_target_end Token * _literal; Token * _literal_1; expr_ty a; @@ -9852,7 +9856,7 @@ del_target_rule(Parser *p) && (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' && - _PyPegen_lookahead(0, t_lookahead_rule, p) + _PyPegen_lookahead(1, del_target_end_rule, p) ) { Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -9889,7 +9893,12 @@ del_target_rule(Parser *p) return _res; } -// del_t_atom: NAME | '(' del_target ')' | '(' del_targets? ')' | '[' del_targets? ']' +// del_t_atom: +// | NAME &del_target_end +// | '(' del_target ')' +// | '(' del_targets? ')' +// | '[' del_targets? 
']' +// | invalid_del_target static expr_ty del_t_atom_rule(Parser *p) { @@ -9906,10 +9915,12 @@ del_t_atom_rule(Parser *p) UNUSED(_start_lineno); // Only used by EXTRA macro int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro - { // NAME + { // NAME &del_target_end expr_ty a; if ( (a = _PyPegen_name_token(p)) // NAME + && + _PyPegen_lookahead(1, del_target_end_rule, p) ) { _res = _PyPegen_set_expr_context ( p , a , Del ); @@ -10000,6 +10011,86 @@ del_t_atom_rule(Parser *p) } p->mark = _mark; } + { // invalid_del_target + void *invalid_del_target_var; + if ( + (invalid_del_target_var = invalid_del_target_rule(p)) // invalid_del_target + ) + { + _res = invalid_del_target_var; + goto done; + } + p->mark = _mark; + } + _res = NULL; + done: + return _res; +} + +// del_target_end: ')' | ']' | ',' | ';' | NEWLINE +static void * +del_target_end_rule(Parser *p) +{ + if (p->error_indicator) { + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // ')' + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 8)) // token=')' + ) + { + _res = _literal; + goto done; + } + p->mark = _mark; + } + { // ']' + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 10)) // token=']' + ) + { + _res = _literal; + goto done; + } + p->mark = _mark; + } + { // ',' + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + ) + { + _res = _literal; + goto done; + } + p->mark = _mark; + } + { // ';' + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 13)) // token=';' + ) + { + _res = _literal; + goto done; + } + p->mark = _mark; + } + { // NEWLINE + Token * newline_var; + if ( + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' + ) + { + _res = newline_var; + goto done; + } + p->mark = _mark; + } _res = NULL; done: return _res; @@ -10962,6 +11053,37 @@ invalid_double_type_comments_rule(Parser *p) return _res; } +// invalid_del_target: 
star_expression &del_target_end +static void * +invalid_del_target_rule(Parser *p) +{ + if (p->error_indicator) { + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // star_expression &del_target_end + expr_ty a; + if ( + (a = star_expression_rule(p)) // star_expression + && + _PyPegen_lookahead(1, del_target_end_rule, p) + ) + { + _res = RAISE_SYNTAX_ERROR ( "cannot delete %s" , _PyPegen_get_expr_name ( a ) ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + return NULL; + } + goto done; + } + p->mark = _mark; + } + _res = NULL; + done: + return _res; +} + // _loop0_1: NEWLINE static asdl_seq * _loop0_1_rule(Parser *p) From webhook-mailer at python.org Mon May 11 19:43:58 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Mon, 11 May 2020 23:43:58 -0000 Subject: [Python-checkins] bpo-39465: Don't access directly _Py_Identifier members (GH-20043) Message-ID: https://github.com/python/cpython/commit/4804b5b3df82e7892ca0550b02f902bcfc16bb48 commit: 4804b5b3df82e7892ca0550b02f902bcfc16bb48 branch: master author: Victor Stinner committer: GitHub date: 2020-05-12T01:43:38+02:00 summary: bpo-39465: Don't access directly _Py_Identifier members (GH-20043) * Replace id->object with _PyUnicode_FromId(&id) * Use _Py_static_string_init(str) macro to initialize statically name_op in typeobject.c. files: M Modules/_cursesmodule.c M Objects/abstract.c M Objects/typeobject.c M Python/ceval.c diff --git a/Modules/_cursesmodule.c b/Modules/_cursesmodule.c index 08991fd54808f..c70b0e2a19fad 100644 --- a/Modules/_cursesmodule.c +++ b/Modules/_cursesmodule.c @@ -3814,7 +3814,7 @@ update_lines_cols(void) return 0; } /* PyId_LINES.object will be initialized here. 
*/ - if (PyDict_SetItem(ModDict, PyId_LINES.object, o)) { + if (PyDict_SetItem(ModDict, _PyUnicode_FromId(&PyId_LINES), o)) { Py_DECREF(m); Py_DECREF(o); return 0; @@ -3830,7 +3830,7 @@ update_lines_cols(void) Py_DECREF(o); return 0; } - if (PyDict_SetItem(ModDict, PyId_COLS.object, o)) { + if (PyDict_SetItem(ModDict, _PyUnicode_FromId(&PyId_COLS), o)) { Py_DECREF(m); Py_DECREF(o); return 0; diff --git a/Objects/abstract.c b/Objects/abstract.c index 6e390dd92c3ae..b014f79e8d0fb 100644 --- a/Objects/abstract.c +++ b/Objects/abstract.c @@ -2287,7 +2287,7 @@ method_output_as_list(PyObject *o, _Py_Identifier *meth_id) PyErr_Format(PyExc_TypeError, "%.200s.%U() returned a non-iterable (type %.200s)", Py_TYPE(o)->tp_name, - meth_id->object, + _PyUnicode_FromId(meth_id), Py_TYPE(meth_output)->tp_name); } Py_DECREF(meth_output); diff --git a/Objects/typeobject.c b/Objects/typeobject.c index a36b4dcc46d21..243f8811b6257 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -1519,7 +1519,7 @@ lookup_method(PyObject *self, _Py_Identifier *attrid, int *unbound) { PyObject *res = lookup_maybe_method(self, attrid, unbound); if (res == NULL && !PyErr_Occurred()) { - PyErr_SetObject(PyExc_AttributeError, attrid->object); + PyErr_SetObject(PyExc_AttributeError, _PyUnicode_FromId(attrid)); } return res; } @@ -6864,12 +6864,12 @@ slot_tp_setattro(PyObject *self, PyObject *name, PyObject *value) } static _Py_Identifier name_op[] = { - {0, "__lt__", 0}, - {0, "__le__", 0}, - {0, "__eq__", 0}, - {0, "__ne__", 0}, - {0, "__gt__", 0}, - {0, "__ge__", 0} + _Py_static_string_init("__lt__"), + _Py_static_string_init("__le__"), + _Py_static_string_init("__eq__"), + _Py_static_string_init("__ne__"), + _Py_static_string_init("__gt__"), + _Py_static_string_init("__ge__"), }; static PyObject * diff --git a/Python/ceval.c b/Python/ceval.c index 6435bd05446aa..e54e344a5fd51 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -4414,7 +4414,7 @@ special_lookup(PyThreadState *tstate, 
PyObject *o, _Py_Identifier *id) PyObject *res; res = _PyObject_LookupSpecial(o, id); if (res == NULL && !_PyErr_Occurred(tstate)) { - _PyErr_SetObject(tstate, PyExc_AttributeError, id->object); + _PyErr_SetObject(tstate, PyExc_AttributeError, _PyUnicode_FromId(id)); return NULL; } return res; From webhook-mailer at python.org Mon May 11 20:00:58 2020 From: webhook-mailer at python.org (Raymond Hettinger) Date: Tue, 12 May 2020 00:00:58 -0000 Subject: [Python-checkins] bpo-40571: Make lru_cache(maxsize=None) more discoverable (GH-20019) Message-ID: https://github.com/python/cpython/commit/21cdb711e3b1975398c54141e519ead02670610e commit: 21cdb711e3b1975398c54141e519ead02670610e branch: master author: Raymond Hettinger committer: GitHub date: 2020-05-11T17:00:53-07:00 summary: bpo-40571: Make lru_cache(maxsize=None) more discoverable (GH-20019) files: A Misc/NEWS.d/next/Library/2020-05-09-15-38-25.bpo-40571.kOXZGC.rst M Doc/library/functools.rst M Lib/functools.py M Lib/test/test_functools.py diff --git a/Doc/library/functools.rst b/Doc/library/functools.rst index 856c1c790ae36..204e66ae5ac40 100644 --- a/Doc/library/functools.rst +++ b/Doc/library/functools.rst @@ -26,6 +26,32 @@ function for the purposes of this module. The :mod:`functools` module defines the following functions: +.. decorator:: cache(user_function) + + Simple lightweight unbounded function cache. Sometimes called + `"memoize" `_. + + Returns the same as ``lru_cache(maxsize=None)``, creating a thin + wrapper around a dictionary lookup for the function arguments. Because it + never needs to evict old values, this is smaller and faster than + :func:`lru_cache()` with a size limit. 
+ + For example:: + + @cache + def factorial(n): + return n * factorial(n-1) if n else 1 + + >>> factorial(10) # no previously cached result, makes 11 recursive calls + 3628800 + >>> factorial(5) # just looks up cached value result + 120 + >>> factorial(12) # makes two new recursive calls, the other 10 are cached + 479001600 + + .. versionadded:: 3.9 + + .. decorator:: cached_property(func) Transform a method of a class into a property whose value is computed once diff --git a/Lib/functools.py b/Lib/functools.py index f05b106b62c00..87c7d87438998 100644 --- a/Lib/functools.py +++ b/Lib/functools.py @@ -10,7 +10,7 @@ # See C source code for _functools credits/copyright __all__ = ['update_wrapper', 'wraps', 'WRAPPER_ASSIGNMENTS', 'WRAPPER_UPDATES', - 'total_ordering', 'cmp_to_key', 'lru_cache', 'reduce', + 'total_ordering', 'cache', 'cmp_to_key', 'lru_cache', 'reduce', 'TopologicalSorter', 'CycleError', 'partial', 'partialmethod', 'singledispatch', 'singledispatchmethod', 'cached_property'] @@ -888,6 +888,15 @@ def cache_clear(): pass +################################################################################ +### cache -- simplified access to the infinity cache +################################################################################ + +def cache(user_function, /): + 'Simple lightweight unbounded cache. Sometimes called "memoize".' + return lru_cache(maxsize=None)(user_function) + + ################################################################################ ### singledispatch() - single-dispatch generic function decorator ################################################################################ diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py index b3893a15566fa..e122fe0b33340 100644 --- a/Lib/test/test_functools.py +++ b/Lib/test/test_functools.py @@ -1432,6 +1432,25 @@ def check_order_with_hash_seed(seed): self.assertEqual(run1, run2) +class TestCache: + # This tests that the pass-through is working as designed. 
+ # The underlying functionality is tested in TestLRU. + + def test_cache(self): + @self.module.cache + def fib(n): + if n < 2: + return n + return fib(n-1) + fib(n-2) + self.assertEqual([fib(n) for n in range(16)], + [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610]) + self.assertEqual(fib.cache_info(), + self.module._CacheInfo(hits=28, misses=16, maxsize=None, currsize=16)) + fib.cache_clear() + self.assertEqual(fib.cache_info(), + self.module._CacheInfo(hits=0, misses=0, maxsize=None, currsize=0)) + + class TestLRU: def test_lru(self): diff --git a/Misc/NEWS.d/next/Library/2020-05-09-15-38-25.bpo-40571.kOXZGC.rst b/Misc/NEWS.d/next/Library/2020-05-09-15-38-25.bpo-40571.kOXZGC.rst new file mode 100644 index 0000000000000..476770f6974d2 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-09-15-38-25.bpo-40571.kOXZGC.rst @@ -0,0 +1,2 @@ +Added functools.cache() as a simpler, more discoverable way to access the +unbounded cache variant of lru_cache(maxsize=None). From webhook-mailer at python.org Mon May 11 20:42:23 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 12 May 2020 00:42:23 -0000 Subject: [Python-checkins] bpo-40602: Rename hashtable.h to pycore_hashtable.h (GH-20044) Message-ID: https://github.com/python/cpython/commit/b617993b7c0b0f6f679ef7003a62d0318b6d6af9 commit: b617993b7c0b0f6f679ef7003a62d0318b6d6af9 branch: master author: Victor Stinner committer: GitHub date: 2020-05-12T02:42:19+02:00 summary: bpo-40602: Rename hashtable.h to pycore_hashtable.h (GH-20044) * Move Modules/hashtable.h to Include/internal/pycore_hashtable.h * Move Modules/hashtable.c to Python/hashtable.c * Python is now linked to hashtable.c. _tracemalloc is no longer linked to hashtable.c. Previously, marshal.c got hashtable.c via _tracemalloc.c which is built as a builtin module. 
files: A Include/internal/pycore_hashtable.h A Python/hashtable.c D Modules/hashtable.c D Modules/hashtable.h M Makefile.pre.in M Modules/Setup M Modules/_tracemalloc.c M PCbuild/pythoncore.vcxproj M PCbuild/pythoncore.vcxproj.filters M Python/marshal.c diff --git a/Modules/hashtable.h b/Include/internal/pycore_hashtable.h similarity index 96% rename from Modules/hashtable.h rename to Include/internal/pycore_hashtable.h index dbec23d285187..585f76b51d711 100644 --- a/Modules/hashtable.h +++ b/Include/internal/pycore_hashtable.h @@ -1,7 +1,12 @@ -#ifndef Py_HASHTABLE_H -#define Py_HASHTABLE_H -/* The whole API is private */ -#ifndef Py_LIMITED_API +#ifndef Py_INTERNAL_HASHTABLE_H +#define Py_INTERNAL_HASHTABLE_H +#ifdef __cplusplus +extern "C" { +#endif + +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" +#endif /* Single linked list */ @@ -207,5 +212,7 @@ PyAPI_FUNC(int) _Py_hashtable_pop( _Py_hashtable_pop(TABLE, sizeof(KEY), &(KEY), sizeof(DATA), &(DATA)) -#endif /* Py_LIMITED_API */ +#ifdef __cplusplus +} #endif +#endif /* !Py_INTERNAL_HASHTABLE_H */ diff --git a/Makefile.pre.in b/Makefile.pre.in index 0d616d304484c..d545a9efb3cd9 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -351,6 +351,7 @@ PYTHON_OBJS= \ Python/getversion.o \ Python/graminit.o \ Python/hamt.o \ + Python/hashtable.o \ Python/import.o \ Python/importdl.o \ Python/initconfig.o \ @@ -1131,6 +1132,7 @@ PYTHON_HEADERS= \ $(srcdir)/Include/internal/pycore_getopt.h \ $(srcdir)/Include/internal/pycore_gil.h \ $(srcdir)/Include/internal/pycore_hamt.h \ + $(srcdir)/Include/internal/pycore_hashtable.h \ $(srcdir)/Include/internal/pycore_import.h \ $(srcdir)/Include/internal/pycore_initconfig.h \ $(srcdir)/Include/internal/pycore_interp.h \ diff --git a/Modules/Setup b/Modules/Setup index 6bf142419de3d..87e73bac78fae 100644 --- a/Modules/Setup +++ b/Modules/Setup @@ -132,7 +132,7 @@ faulthandler faulthandler.c # # bpo-35053: The module must be builtin since 
_Py_NewReference() # can call _PyTraceMalloc_NewReference(). -_tracemalloc _tracemalloc.c hashtable.c +_tracemalloc _tracemalloc.c # PEG-based parser module -- slated to be *the* parser _peg_parser _peg_parser.c diff --git a/Modules/_tracemalloc.c b/Modules/_tracemalloc.c index ea7e0127366ab..f22338166d0dc 100644 --- a/Modules/_tracemalloc.c +++ b/Modules/_tracemalloc.c @@ -2,7 +2,7 @@ #include "pycore_gc.h" // PyGC_Head #include "pycore_pymem.h" // _Py_tracemalloc_config #include "pycore_traceback.h" -#include "hashtable.h" +#include "pycore_hashtable.h" #include "frameobject.h" // PyFrame_GetBack() #include "clinic/_tracemalloc.c.h" diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj index 73274ac9acf55..b6b0cf3e991ba 100644 --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -181,6 +181,7 @@ + @@ -335,7 +336,6 @@ - @@ -462,6 +462,7 @@ + diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters index 254c8fbbea5fb..10dfffba6113e 100644 --- a/PCbuild/pythoncore.vcxproj.filters +++ b/PCbuild/pythoncore.vcxproj.filters @@ -237,6 +237,9 @@ Include + + Include + Include @@ -1034,6 +1037,9 @@ Python + + Modules + Python @@ -1142,9 +1148,6 @@ Modules - - Modules - PC diff --git a/Modules/hashtable.c b/Python/hashtable.c similarity index 99% rename from Modules/hashtable.c rename to Python/hashtable.c index 4a36a1e71cdd0..22b84590105f9 100644 --- a/Modules/hashtable.c +++ b/Python/hashtable.c @@ -45,7 +45,7 @@ */ #include "Python.h" -#include "hashtable.h" +#include "pycore_hashtable.h" #define HASHTABLE_MIN_SIZE 16 #define HASHTABLE_HIGH 0.50 diff --git a/Python/marshal.c b/Python/marshal.c index b4429aea502d3..d2bff524f30dd 100644 --- a/Python/marshal.c +++ b/Python/marshal.c @@ -12,7 +12,7 @@ #include "longintrepr.h" #include "code.h" #include "marshal.h" -#include "../Modules/hashtable.h" +#include "pycore_hashtable.h" /*[clinic input] module marshal From webhook-mailer at python.org Mon May 11 21:07:44 
2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 12 May 2020 01:07:44 -0000 Subject: [Python-checkins] bpo-40602: _Py_hashtable_new() uses PyMem_Malloc() (GH-20046) Message-ID: https://github.com/python/cpython/commit/d0919f0d6bb757b6bcfd7b2e15656d318c9d5cd9 commit: d0919f0d6bb757b6bcfd7b2e15656d318c9d5cd9 branch: master author: Victor Stinner committer: GitHub date: 2020-05-12T03:07:40+02:00 summary: bpo-40602: _Py_hashtable_new() uses PyMem_Malloc() (GH-20046) _Py_hashtable_new() now uses PyMem_Malloc/PyMem_Free allocator by default, rather than PyMem_RawMalloc/PyMem_RawFree. PyMem_Malloc is faster than PyMem_RawMalloc for memory blocks smaller than or equal to 512 bytes. files: M Python/hashtable.c diff --git a/Python/hashtable.c b/Python/hashtable.c index 22b84590105f9..e9f02d8650e4f 100644 --- a/Python/hashtable.c +++ b/Python/hashtable.c @@ -149,11 +149,12 @@ _Py_hashtable_new_full(size_t key_size, size_t data_size, _Py_hashtable_allocator_t alloc; if (allocator == NULL) { - alloc.malloc = PyMem_RawMalloc; - alloc.free = PyMem_RawFree; + alloc.malloc = PyMem_Malloc; + alloc.free = PyMem_Free; } - else + else { alloc = *allocator; + } ht = (_Py_hashtable_t *)alloc.malloc(sizeof(_Py_hashtable_t)); if (ht == NULL) From webhook-mailer at python.org Mon May 11 22:19:41 2020 From: webhook-mailer at python.org (Tim Peters) Date: Tue, 12 May 2020 02:19:41 -0000 Subject: [Python-checkins] bpo-40480: restore ability to join fnmatch.translate() results (GH-20049) Message-ID: https://github.com/python/cpython/commit/b1b4c790e7d3b5f4244450aefe3d8f01710c13f7 commit: b1b4c790e7d3b5f4244450aefe3d8f01710c13f7 branch: master author: Tim Peters committer: GitHub date: 2020-05-11T21:19:20-05:00 summary: bpo-40480: restore ability to join fnmatch.translate() results (GH-20049) In translate(), generate unique group names across calls. The restores the undocumented ability to get a valid regexp by joining multiple translate() results via `|`. 
files: M Lib/fnmatch.py M Lib/test/test_fnmatch.py diff --git a/Lib/fnmatch.py b/Lib/fnmatch.py index d7d915d51314d..0eb1802bdb53c 100644 --- a/Lib/fnmatch.py +++ b/Lib/fnmatch.py @@ -16,6 +16,12 @@ __all__ = ["filter", "fnmatch", "fnmatchcase", "translate"] +# Build a thread-safe incrementing counter to help create unique regexp group +# names across calls. +from itertools import count +_nextgroupnum = count().__next__ +del count + def fnmatch(name, pat): """Test whether FILENAME matches PATTERN. @@ -148,9 +154,12 @@ def translate(pat): # in a lookahead assertion, save the matched part in a group, then # consume that group via a backreference. If the overall match fails, # the lookahead assertion won't try alternatives. So the translation is: - # (?=(P.*?fixed))(?P=name) - # Group names are created as needed: g1, g2, g3, ... - groupnum = 0 + # (?=(?P.*?fixed))(?P=name) + # Group names are created as needed: g0, g1, g2, ... + # The numbers are obtained from _nextgroupnum() to ensure they're unique + # across calls and across threads. This is because people rely on the + # undocumented ability to join multiple translate() results together via + # "|" to build large regexps matching "one of many" shell patterns. 
while i < n: assert inp[i] is STAR i += 1 @@ -167,7 +176,7 @@ def translate(pat): add(".*") add(fixed) else: - groupnum += 1 + groupnum = _nextgroupnum() add(f"(?=(?P.*?{fixed}))(?P=g{groupnum})") assert i == n res = "".join(res) diff --git a/Lib/test/test_fnmatch.py b/Lib/test/test_fnmatch.py index 4c173069503cc..10668e4f6103a 100644 --- a/Lib/test/test_fnmatch.py +++ b/Lib/test/test_fnmatch.py @@ -106,6 +106,7 @@ def test_warnings(self): class TranslateTestCase(unittest.TestCase): def test_translate(self): + import re self.assertEqual(translate('*'), r'(?s:.*)\Z') self.assertEqual(translate('?'), r'(?s:.)\Z') self.assertEqual(translate('a?b*'), r'(?s:a.b.*)\Z') @@ -122,9 +123,26 @@ def test_translate(self): self.assertEqual(translate('*********A'), r'(?s:.*A)\Z') self.assertEqual(translate('A*********?[?]?'), r'(?s:A.*.[?].)\Z') # fancy translation to prevent exponential-time match failure - self.assertEqual(translate('**a*a****a'), - r'(?s:(?=(?P.*?a))(?P=g1)(?=(?P.*?a))(?P=g2).*a)\Z') - + t = translate('**a*a****a') + digits = re.findall(r'\d+', t) + self.assertEqual(len(digits), 4) + self.assertEqual(digits[0], digits[1]) + self.assertEqual(digits[2], digits[3]) + g1 = f"g{digits[0]}" # e.g., group name "g4" + g2 = f"g{digits[2]}" # e.g., group name "g5" + self.assertEqual(t, + fr'(?s:(?=(?P<{g1}>.*?a))(?P={g1})(?=(?P<{g2}>.*?a))(?P={g2}).*a)\Z') + # and try pasting multiple translate results - it's an undocumented + # feature that this works; all the pain of generating unique group + # names across calls exists to support this + r1 = translate('**a**a**a*') + r2 = translate('**b**b**b*') + r3 = translate('*c*c*c*') + fatre = "|".join([r1, r2, r3]) + self.assertTrue(re.match(fatre, 'abaccad')) + self.assertTrue(re.match(fatre, 'abxbcab')) + self.assertTrue(re.match(fatre, 'cbabcaxc')) + self.assertFalse(re.match(fatre, 'dabccbad')) class FilterTestCase(unittest.TestCase): From webhook-mailer at python.org Mon May 11 22:32:48 2020 From: webhook-mailer at 
python.org (Batuhan Taskaya) Date: Tue, 12 May 2020 02:32:48 -0000 Subject: [Python-checkins] bpo-39481: remove generic classes from ipaddress/mmap (GH-20045) Message-ID: https://github.com/python/cpython/commit/f3a5b7ada0c951f317dbd307de4b410e58d3e1b3 commit: f3a5b7ada0c951f317dbd307de4b410e58d3e1b3 branch: master author: Batuhan Taskaya committer: GitHub date: 2020-05-11T19:32:40-07:00 summary: bpo-39481: remove generic classes from ipaddress/mmap (GH-20045) These were added by mistake (see https://bugs.python.org/issue39481#msg366288). files: M Lib/ipaddress.py M Lib/test/test_genericalias.py M Modules/mmapmodule.c diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py index 439f241817468..6e5a754c2acf1 100644 --- a/Lib/ipaddress.py +++ b/Lib/ipaddress.py @@ -12,7 +12,6 @@ import functools -import types IPV4LENGTH = 32 IPV6LENGTH = 128 @@ -1125,8 +1124,6 @@ def is_loopback(self): return (self.network_address.is_loopback and self.broadcast_address.is_loopback) - __class_getitem__ = classmethod(types.GenericAlias) - class _BaseV4: """Base IPv4 object. 
@@ -1446,8 +1443,6 @@ def with_hostmask(self): return '%s/%s' % (self._string_from_ip_int(self._ip), self.hostmask) - __class_getitem__ = classmethod(types.GenericAlias) - class IPv4Network(_BaseV4, _BaseNetwork): @@ -2156,8 +2151,6 @@ def is_unspecified(self): def is_loopback(self): return self._ip == 1 and self.network.is_loopback - __class_getitem__ = classmethod(types.GenericAlias) - class IPv6Network(_BaseV6, _BaseNetwork): diff --git a/Lib/test/test_genericalias.py b/Lib/test/test_genericalias.py index 024b2f6ed6636..4f3798e8f87d8 100644 --- a/Lib/test/test_genericalias.py +++ b/Lib/test/test_genericalias.py @@ -17,8 +17,6 @@ from difflib import SequenceMatcher from filecmp import dircmp from fileinput import FileInput -from mmap import mmap -from ipaddress import IPv4Network, IPv4Interface, IPv6Network, IPv6Interface from itertools import chain from http.cookies import Morsel from multiprocessing.managers import ValueProxy @@ -49,7 +47,6 @@ class BaseTest(unittest.TestCase): def test_subscriptable(self): for t in (type, tuple, list, dict, set, frozenset, enumerate, - mmap, defaultdict, deque, SequenceMatcher, dircmp, @@ -74,7 +71,6 @@ def test_subscriptable(self): Sequence, MutableSequence, MappingProxyType, AsyncGeneratorType, DirEntry, - IPv4Network, IPv4Interface, IPv6Network, IPv6Interface, chain, TemporaryDirectory, SpooledTemporaryFile, Queue, SimpleQueue, diff --git a/Modules/mmapmodule.c b/Modules/mmapmodule.c index 6c503b3429b23..a3e22d0a5110d 100644 --- a/Modules/mmapmodule.c +++ b/Modules/mmapmodule.c @@ -816,8 +816,6 @@ static struct PyMethodDef mmap_object_methods[] = { #ifdef MS_WINDOWS {"__sizeof__", (PyCFunction) mmap__sizeof__method, METH_NOARGS}, #endif - {"__class_getitem__", (PyCFunction)Py_GenericAlias, METH_O|METH_CLASS, - PyDoc_STR("See PEP 585")}, {NULL, NULL} /* sentinel */ }; From webhook-mailer at python.org Tue May 12 05:42:33 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Tue, 12 May 2020 09:42:33 -0000 Subject: 
[Python-checkins] bpo-40593: Improve syntax errors for invalid characters in source code. (GH-20033) Message-ID: https://github.com/python/cpython/commit/74ea6b5a7501fb393cd567fb21998d0bfeeb267c commit: 74ea6b5a7501fb393cd567fb21998d0bfeeb267c branch: master author: Serhiy Storchaka committer: GitHub date: 2020-05-12T12:42:04+03:00 summary: bpo-40593: Improve syntax errors for invalid characters in source code. (GH-20033) files: A Misc/NEWS.d/next/Core and Builtins/2020-05-11-13-50-52.bpo-40593.yuOXj3.rst M Include/cpython/unicodeobject.h M Include/errcode.h M Lib/test/test_fstring.py M Lib/test/test_source_encoding.py M Lib/test/test_unicode_identifiers.py M Objects/unicodeobject.c M Parser/pegen/pegen.c M Parser/tokenizer.c M Python/pythonrun.c diff --git a/Include/cpython/unicodeobject.h b/Include/cpython/unicodeobject.h index 81a35cdc801d0..94326876292b6 100644 --- a/Include/cpython/unicodeobject.h +++ b/Include/cpython/unicodeobject.h @@ -1222,6 +1222,8 @@ PyAPI_FUNC(void) _PyUnicode_ClearStaticStrings(void); and where the hash values are equal (i.e. 
a very probable match) */ PyAPI_FUNC(int) _PyUnicode_EQ(PyObject *, PyObject *); +PyAPI_FUNC(Py_ssize_t) _PyUnicode_ScanIdentifier(PyObject *); + #ifdef __cplusplus } #endif diff --git a/Include/errcode.h b/Include/errcode.h index b37cd261d5ec4..790518b8b7730 100644 --- a/Include/errcode.h +++ b/Include/errcode.h @@ -29,7 +29,6 @@ extern "C" { #define E_EOFS 23 /* EOF in triple-quoted string */ #define E_EOLS 24 /* EOL in single-quoted string */ #define E_LINECONT 25 /* Unexpected characters after a line continuation */ -#define E_IDENTIFIER 26 /* Invalid characters in identifier */ #define E_BADSINGLE 27 /* Ill-formed single statement input */ #ifdef __cplusplus diff --git a/Lib/test/test_fstring.py b/Lib/test/test_fstring.py index ac5aa9a76efe7..e0bb5b56b2614 100644 --- a/Lib/test/test_fstring.py +++ b/Lib/test/test_fstring.py @@ -583,7 +583,7 @@ def test_missing_expression(self): ]) # Different error message is raised for other whitespace characters. - self.assertAllRaise(SyntaxError, 'invalid character in identifier', + self.assertAllRaise(SyntaxError, r"invalid non-printable character U\+00A0", ["f'''{\xa0}'''", "\xa0", ]) diff --git a/Lib/test/test_source_encoding.py b/Lib/test/test_source_encoding.py index a0bd741c36ac2..5ca43461d9940 100644 --- a/Lib/test/test_source_encoding.py +++ b/Lib/test/test_source_encoding.py @@ -57,6 +57,9 @@ def test_issue7820(self): # one byte in common with the UTF-16-LE BOM self.assertRaises(SyntaxError, eval, b'\xff\x20') + # one byte in common with the UTF-8 BOM + self.assertRaises(SyntaxError, eval, b'\xef\x20') + # two bytes in common with the UTF-8 BOM self.assertRaises(SyntaxError, eval, b'\xef\xbb\x20') diff --git a/Lib/test/test_unicode_identifiers.py b/Lib/test/test_unicode_identifiers.py index 07332c4631903..5b9ced5d1cb83 100644 --- a/Lib/test/test_unicode_identifiers.py +++ b/Lib/test/test_unicode_identifiers.py @@ -20,9 +20,11 @@ def test_non_bmp_normalized(self): def test_invalid(self): try: from test import 
badsyntax_3131 - except SyntaxError as s: - self.assertEqual(str(s), - "invalid character in identifier (badsyntax_3131.py, line 2)") + except SyntaxError as err: + self.assertEqual(str(err), + "invalid character '?' (U+20AC) (badsyntax_3131.py, line 2)") + self.assertEqual(err.lineno, 2) + self.assertEqual(err.offset, 1) else: self.fail("expected exception didn't occur") diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-11-13-50-52.bpo-40593.yuOXj3.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-11-13-50-52.bpo-40593.yuOXj3.rst new file mode 100644 index 0000000000000..5587d4f49ccf9 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-11-13-50-52.bpo-40593.yuOXj3.rst @@ -0,0 +1 @@ +Improved syntax errors for invalid characters in source code. diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 18b9458721de1..276547ca48a5b 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -12309,31 +12309,22 @@ unicode_isnumeric_impl(PyObject *self) Py_RETURN_TRUE; } -int -PyUnicode_IsIdentifier(PyObject *self) +Py_ssize_t +_PyUnicode_ScanIdentifier(PyObject *self) { Py_ssize_t i; - int ready = PyUnicode_IS_READY(self); + if (PyUnicode_READY(self) == -1) + return -1; - Py_ssize_t len = ready ? 
PyUnicode_GET_LENGTH(self) : PyUnicode_GET_SIZE(self); + Py_ssize_t len = PyUnicode_GET_LENGTH(self); if (len == 0) { /* an empty string is not a valid identifier */ return 0; } - int kind = 0; - const void *data = NULL; - const wchar_t *wstr = NULL; - Py_UCS4 ch; - if (ready) { - kind = PyUnicode_KIND(self); - data = PyUnicode_DATA(self); - ch = PyUnicode_READ(kind, data, 0); - } - else { - wstr = _PyUnicode_WSTR(self); - ch = wstr[0]; - } + int kind = PyUnicode_KIND(self); + const void *data = PyUnicode_DATA(self); + Py_UCS4 ch = PyUnicode_READ(kind, data, 0); /* PEP 3131 says that the first character must be in XID_Start and subsequent characters in XID_Continue, and for the ASCII range, the 2.x rules apply (i.e @@ -12347,17 +12338,44 @@ PyUnicode_IsIdentifier(PyObject *self) } for (i = 1; i < len; i++) { - if (ready) { - ch = PyUnicode_READ(kind, data, i); + ch = PyUnicode_READ(kind, data, i); + if (!_PyUnicode_IsXidContinue(ch)) { + return i; } - else { - ch = wstr[i]; + } + return i; +} + +int +PyUnicode_IsIdentifier(PyObject *self) +{ + if (PyUnicode_IS_READY(self)) { + Py_ssize_t i = _PyUnicode_ScanIdentifier(self); + Py_ssize_t len = PyUnicode_GET_LENGTH(self); + /* an empty string is not a valid identifier */ + return len && i == len; + } + else { + Py_ssize_t i, len = PyUnicode_GET_SIZE(self); + if (len == 0) { + /* an empty string is not a valid identifier */ + return 0; } - if (!_PyUnicode_IsXidContinue(ch)) { + + const wchar_t *wstr = _PyUnicode_WSTR(self); + Py_UCS4 ch = wstr[0]; + if (!_PyUnicode_IsXidStart(ch) && ch != 0x5F /* LOW LINE */) { return 0; } + + for (i = 1; i < len; i++) { + ch = wstr[i]; + if (!_PyUnicode_IsXidContinue(ch)) { + return 0; + } + } + return 1; } - return 1; } /*[clinic input] diff --git a/Parser/pegen/pegen.c b/Parser/pegen/pegen.c index c80f08668b07d..5f8c862c1f88b 100644 --- a/Parser/pegen/pegen.c +++ b/Parser/pegen/pegen.c @@ -337,9 +337,6 @@ tokenizer_error(Parser *p) case E_TOKEN: msg = "invalid token"; break; - case 
E_IDENTIFIER: - msg = "invalid character in identifier"; - break; case E_EOFS: RAISE_SYNTAX_ERROR("EOF while scanning triple-quoted string literal"); return -1; diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c index 0f2b6af5e50ad..b81fa118f216e 100644 --- a/Parser/tokenizer.c +++ b/Parser/tokenizer.c @@ -1101,25 +1101,53 @@ static int verify_identifier(struct tok_state *tok) { PyObject *s; - int result; if (tok->decoding_erred) return 0; s = PyUnicode_DecodeUTF8(tok->start, tok->cur - tok->start, NULL); if (s == NULL) { if (PyErr_ExceptionMatches(PyExc_UnicodeDecodeError)) { - PyErr_Clear(); - tok->done = E_IDENTIFIER; - } else { + tok->done = E_DECODE; + } + else { tok->done = E_ERROR; } return 0; } - result = PyUnicode_IsIdentifier(s); - Py_DECREF(s); - if (result == 0) { - tok->done = E_IDENTIFIER; + Py_ssize_t invalid = _PyUnicode_ScanIdentifier(s); + if (invalid < 0) { + Py_DECREF(s); + tok->done = E_ERROR; + return 0; } - return result; + assert(PyUnicode_GET_LENGTH(s) > 0); + if (invalid < PyUnicode_GET_LENGTH(s)) { + Py_UCS4 ch = PyUnicode_READ_CHAR(s, invalid); + if (invalid + 1 < PyUnicode_GET_LENGTH(s)) { + /* Determine the offset in UTF-8 encoded input */ + Py_SETREF(s, PyUnicode_Substring(s, 0, invalid + 1)); + if (s != NULL) { + Py_SETREF(s, PyUnicode_AsUTF8String(s)); + } + if (s == NULL) { + tok->done = E_ERROR; + return 0; + } + tok->cur = (char *)tok->start + PyBytes_GET_SIZE(s); + } + Py_DECREF(s); + // PyUnicode_FromFormatV() does not support %X + char hex[9]; + snprintf(hex, sizeof(hex), "%04X", ch); + if (Py_UNICODE_ISPRINTABLE(ch)) { + syntaxerror(tok, "invalid character '%c' (U+%s)", ch, hex); + } + else { + syntaxerror(tok, "invalid non-printable character U+%s", hex); + } + return 0; + } + Py_DECREF(s); + return 1; } static int diff --git a/Python/pythonrun.c b/Python/pythonrun.c index 1b79a33c814da..45f08b707eb99 100644 --- a/Python/pythonrun.c +++ b/Python/pythonrun.c @@ -1603,9 +1603,6 @@ err_input(perrdetail *err) msg = "unexpected 
character after line continuation character"; break; - case E_IDENTIFIER: - msg = "invalid character in identifier"; - break; case E_BADSINGLE: msg = "multiple statements found while compiling a single statement"; break; From webhook-mailer at python.org Tue May 12 07:32:11 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 12 May 2020 11:32:11 -0000 Subject: [Python-checkins] bpo-40602: Optimize _Py_hashtable for pointer keys (GH-20051) Message-ID: https://github.com/python/cpython/commit/7c6e97077525f0ad3cfa0971028313b9079449fd commit: 7c6e97077525f0ad3cfa0971028313b9079449fd branch: master author: Victor Stinner committer: GitHub date: 2020-05-12T13:31:59+02:00 summary: bpo-40602: Optimize _Py_hashtable for pointer keys (GH-20051) Optimize _Py_hashtable_get() and _Py_hashtable_get_entry() for pointer keys: * key_size == sizeof(void*) * hash_func == _Py_hashtable_hash_ptr * compare_func == _Py_hashtable_compare_direct Changes: * Add get_func and get_entry_func members to _Py_hashtable_t * Convert _Py_hashtable_get() and _Py_hashtable_get_entry() functions to static nline functions. * Add specialized get and get entry for pointer keys. 
files: M Include/internal/pycore_hashtable.h M Python/hashtable.c diff --git a/Include/internal/pycore_hashtable.h b/Include/internal/pycore_hashtable.h index 585f76b51d711..6e094e94376ad 100644 --- a/Include/internal/pycore_hashtable.h +++ b/Include/internal/pycore_hashtable.h @@ -76,12 +76,17 @@ typedef struct { /* Forward declaration */ struct _Py_hashtable_t; +typedef struct _Py_hashtable_t _Py_hashtable_t; -typedef Py_uhash_t (*_Py_hashtable_hash_func) (struct _Py_hashtable_t *ht, +typedef Py_uhash_t (*_Py_hashtable_hash_func) (_Py_hashtable_t *ht, const void *pkey); -typedef int (*_Py_hashtable_compare_func) (struct _Py_hashtable_t *ht, +typedef int (*_Py_hashtable_compare_func) (_Py_hashtable_t *ht, const void *pkey, const _Py_hashtable_entry_t *he); +typedef _Py_hashtable_entry_t* (*_Py_hashtable_get_entry_func)(_Py_hashtable_t *ht, + const void *pkey); +typedef int (*_Py_hashtable_get_func) (_Py_hashtable_t *ht, + const void *pkey, void *data); typedef struct { /* allocate a memory block */ @@ -93,18 +98,19 @@ typedef struct { /* _Py_hashtable: table */ - -typedef struct _Py_hashtable_t { +struct _Py_hashtable_t { size_t num_buckets; size_t entries; /* Total number of entries in the table. 
*/ _Py_slist_t *buckets; size_t key_size; size_t data_size; + _Py_hashtable_get_func get_func; + _Py_hashtable_get_entry_func get_entry_func; _Py_hashtable_hash_func hash_func; _Py_hashtable_compare_func compare_func; _Py_hashtable_allocator_t alloc; -} _Py_hashtable_t; +}; /* hash a pointer (void*) */ PyAPI_FUNC(Py_uhash_t) _Py_hashtable_hash_ptr( @@ -176,10 +182,12 @@ PyAPI_FUNC(int) _Py_hashtable_set( Don't call directly this function, but use _Py_HASHTABLE_GET_ENTRY() macro */ -PyAPI_FUNC(_Py_hashtable_entry_t*) _Py_hashtable_get_entry( - _Py_hashtable_t *ht, - size_t key_size, - const void *pkey); +static inline _Py_hashtable_entry_t * +_Py_hashtable_get_entry(_Py_hashtable_t *ht, size_t key_size, const void *pkey) +{ + assert(key_size == ht->key_size); + return ht->get_entry_func(ht, pkey); +} #define _Py_HASHTABLE_GET_ENTRY(TABLE, KEY) \ _Py_hashtable_get_entry(TABLE, sizeof(KEY), &(KEY)) @@ -189,12 +197,14 @@ PyAPI_FUNC(_Py_hashtable_entry_t*) _Py_hashtable_get_entry( exists, return 0 if the entry does not exist. 
Don't call directly this function, but use _Py_HASHTABLE_GET() macro */ -PyAPI_FUNC(int) _Py_hashtable_get( - _Py_hashtable_t *ht, - size_t key_size, - const void *pkey, - size_t data_size, - void *data); +static inline int +_Py_hashtable_get(_Py_hashtable_t *ht, size_t key_size, const void *pkey, + size_t data_size, void *data) +{ + assert(key_size == ht->key_size); + assert(data_size == ht->data_size); + return ht->get_func(ht, pkey, data); +} #define _Py_HASHTABLE_GET(TABLE, KEY, DATA) \ _Py_hashtable_get(TABLE, sizeof(KEY), &(KEY), sizeof(DATA), &(DATA)) diff --git a/Python/hashtable.c b/Python/hashtable.c index e9f02d8650e4f..1548c2e4618c3 100644 --- a/Python/hashtable.c +++ b/Python/hashtable.c @@ -108,7 +108,6 @@ Py_uhash_t _Py_hashtable_hash_ptr(struct _Py_hashtable_t *ht, const void *pkey) { void *key; - _Py_HASHTABLE_READ_KEY(ht, pkey, key); return (Py_uhash_t)_Py_HashPointer(key); } @@ -137,61 +136,6 @@ round_size(size_t s) } -_Py_hashtable_t * -_Py_hashtable_new_full(size_t key_size, size_t data_size, - size_t init_size, - _Py_hashtable_hash_func hash_func, - _Py_hashtable_compare_func compare_func, - _Py_hashtable_allocator_t *allocator) -{ - _Py_hashtable_t *ht; - size_t buckets_size; - _Py_hashtable_allocator_t alloc; - - if (allocator == NULL) { - alloc.malloc = PyMem_Malloc; - alloc.free = PyMem_Free; - } - else { - alloc = *allocator; - } - - ht = (_Py_hashtable_t *)alloc.malloc(sizeof(_Py_hashtable_t)); - if (ht == NULL) - return ht; - - ht->num_buckets = round_size(init_size); - ht->entries = 0; - ht->key_size = key_size; - ht->data_size = data_size; - - buckets_size = ht->num_buckets * sizeof(ht->buckets[0]); - ht->buckets = alloc.malloc(buckets_size); - if (ht->buckets == NULL) { - alloc.free(ht); - return NULL; - } - memset(ht->buckets, 0, buckets_size); - - ht->hash_func = hash_func; - ht->compare_func = compare_func; - ht->alloc = alloc; - return ht; -} - - -_Py_hashtable_t * -_Py_hashtable_new(size_t key_size, size_t data_size, - 
_Py_hashtable_hash_func hash_func, - _Py_hashtable_compare_func compare_func) -{ - return _Py_hashtable_new_full(key_size, data_size, - HASHTABLE_MIN_SIZE, - hash_func, compare_func, - NULL); -} - - size_t _Py_hashtable_size(_Py_hashtable_t *ht) { @@ -251,23 +195,20 @@ _Py_hashtable_print_stats(_Py_hashtable_t *ht) _Py_hashtable_entry_t * -_Py_hashtable_get_entry(_Py_hashtable_t *ht, - size_t key_size, const void *pkey) +_Py_hashtable_get_entry_generic(_Py_hashtable_t *ht, const void *pkey) { - Py_uhash_t key_hash; - size_t index; - _Py_hashtable_entry_t *entry; - - assert(key_size == ht->key_size); - - key_hash = ht->hash_func(ht, pkey); - index = key_hash & (ht->num_buckets - 1); - - for (entry = TABLE_HEAD(ht, index); entry != NULL; entry = ENTRY_NEXT(entry)) { - if (entry->key_hash == key_hash && ht->compare_func(ht, pkey, entry)) + Py_uhash_t key_hash = ht->hash_func(ht, pkey); + size_t index = key_hash & (ht->num_buckets - 1); + _Py_hashtable_entry_t *entry = entry = TABLE_HEAD(ht, index); + while (1) { + if (entry == NULL) { + return NULL; + } + if (entry->key_hash == key_hash && ht->compare_func(ht, pkey, entry)) { break; + } + entry = ENTRY_NEXT(entry); } - return entry; } @@ -324,7 +265,7 @@ _Py_hashtable_set(_Py_hashtable_t *ht, size_t key_size, const void *pkey, /* Don't write the assertion on a single line because it is interesting to know the duplicated entry if the assertion failed. The entry can be read using a debugger. 
*/ - entry = _Py_hashtable_get_entry(ht, key_size, pkey); + entry = ht->get_entry_func(ht, pkey); assert(entry == NULL); #endif @@ -352,18 +293,62 @@ _Py_hashtable_set(_Py_hashtable_t *ht, size_t key_size, const void *pkey, int -_Py_hashtable_get(_Py_hashtable_t *ht, size_t key_size,const void *pkey, - size_t data_size, void *data) +_Py_hashtable_get_generic(_Py_hashtable_t *ht, const void *pkey, void *data) { - _Py_hashtable_entry_t *entry; - assert(data != NULL); + _Py_hashtable_entry_t *entry = ht->get_entry_func(ht, pkey); + if (entry != NULL) { + ENTRY_READ_PDATA(ht, entry, ht->data_size, data); + return 1; + } + else { + return 0; + } +} - entry = _Py_hashtable_get_entry(ht, key_size, pkey); - if (entry == NULL) + +// Specialized for: +// key_size == sizeof(void*) +// hash_func == _Py_hashtable_hash_ptr +// compare_func == _Py_hashtable_compare_direct +_Py_hashtable_entry_t * +_Py_hashtable_get_entry_ptr(_Py_hashtable_t *ht, const void *pkey) +{ + Py_uhash_t key_hash = _Py_hashtable_hash_ptr(ht, pkey); + size_t index = key_hash & (ht->num_buckets - 1); + _Py_hashtable_entry_t *entry = entry = TABLE_HEAD(ht, index); + while (1) { + if (entry == NULL) { + return NULL; + } + if (entry->key_hash == key_hash) { + const void *pkey2 = _Py_HASHTABLE_ENTRY_PKEY(entry); + if (memcmp(pkey, pkey2, sizeof(void*)) == 0) { + break; + } + } + entry = ENTRY_NEXT(entry); + } + return entry; +} + + +// Specialized for: +// key_size == sizeof(void*) +// hash_func == _Py_hashtable_hash_ptr +// compare_func == _Py_hashtable_compare_direct +int +_Py_hashtable_get_ptr(_Py_hashtable_t *ht, const void *pkey, void *data) +{ + assert(data != NULL); + _Py_hashtable_entry_t *entry = _Py_hashtable_get_entry_ptr(ht, pkey); + if (entry != NULL) { + ENTRY_READ_PDATA(ht, entry, ht->data_size, data); + return 1; + } + else { return 0; - ENTRY_READ_PDATA(ht, entry, data_size, data); - return 1; + } } @@ -454,6 +439,70 @@ hashtable_rehash(_Py_hashtable_t *ht) } +_Py_hashtable_t * 
+_Py_hashtable_new_full(size_t key_size, size_t data_size, + size_t init_size, + _Py_hashtable_hash_func hash_func, + _Py_hashtable_compare_func compare_func, + _Py_hashtable_allocator_t *allocator) +{ + _Py_hashtable_t *ht; + size_t buckets_size; + _Py_hashtable_allocator_t alloc; + + if (allocator == NULL) { + alloc.malloc = PyMem_Malloc; + alloc.free = PyMem_Free; + } + else { + alloc = *allocator; + } + + ht = (_Py_hashtable_t *)alloc.malloc(sizeof(_Py_hashtable_t)); + if (ht == NULL) + return ht; + + ht->num_buckets = round_size(init_size); + ht->entries = 0; + ht->key_size = key_size; + ht->data_size = data_size; + + buckets_size = ht->num_buckets * sizeof(ht->buckets[0]); + ht->buckets = alloc.malloc(buckets_size); + if (ht->buckets == NULL) { + alloc.free(ht); + return NULL; + } + memset(ht->buckets, 0, buckets_size); + + ht->get_func = _Py_hashtable_get_generic; + ht->get_entry_func = _Py_hashtable_get_entry_generic; + ht->hash_func = hash_func; + ht->compare_func = compare_func; + ht->alloc = alloc; + if (ht->key_size == sizeof(void*) + && ht->hash_func == _Py_hashtable_hash_ptr + && ht->compare_func == _Py_hashtable_compare_direct) + { + ht->get_func = _Py_hashtable_get_ptr; + ht->get_entry_func = _Py_hashtable_get_entry_ptr; + } + return ht; +} + + +_Py_hashtable_t * +_Py_hashtable_new(size_t key_size, size_t data_size, + _Py_hashtable_hash_func hash_func, + _Py_hashtable_compare_func compare_func) +{ + return _Py_hashtable_new_full(key_size, data_size, + HASHTABLE_MIN_SIZE, + hash_func, compare_func, + NULL); +} + + void _Py_hashtable_clear(_Py_hashtable_t *ht) { From webhook-mailer at python.org Tue May 12 09:18:11 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Tue, 12 May 2020 13:18:11 -0000 Subject: [Python-checkins] bpo-40596: Fix str.isidentifier() for non-canonicalized strings containing non-BMP characters on Windows. 
(GH-20053) Message-ID: https://github.com/python/cpython/commit/5650e76f63a6f4ec55d00ec13f143d84a2efee39 commit: 5650e76f63a6f4ec55d00ec13f143d84a2efee39 branch: master author: Serhiy Storchaka committer: GitHub date: 2020-05-12T16:18:00+03:00 summary: bpo-40596: Fix str.isidentifier() for non-canonicalized strings containing non-BMP characters on Windows. (GH-20053) files: A Misc/NEWS.d/next/Core and Builtins/2020-05-11-20-53-52.bpo-40596.dwOH_X.rst M Lib/test/test_unicode.py M Objects/unicodeobject.c diff --git a/Lib/test/test_unicode.py b/Lib/test/test_unicode.py index 2839889646789..2ee4e64d63530 100644 --- a/Lib/test/test_unicode.py +++ b/Lib/test/test_unicode.py @@ -720,6 +720,13 @@ def test_isidentifier(self): self.assertFalse("?".isidentifier()) self.assertFalse("0".isidentifier()) + @support.cpython_only + def test_isidentifier_legacy(self): + import _testcapi + u = '???????' + self.assertTrue(u.isidentifier()) + self.assertTrue(_testcapi.unicode_legacy_string(u).isidentifier()) + def test_isprintable(self): self.assertTrue("".isprintable()) self.assertTrue(" ".isprintable()) diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-11-20-53-52.bpo-40596.dwOH_X.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-11-20-53-52.bpo-40596.dwOH_X.rst new file mode 100644 index 0000000000000..1252db4dc9848 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-11-20-53-52.bpo-40596.dwOH_X.rst @@ -0,0 +1,2 @@ +Fixed :meth:`str.isidentifier` for non-canonicalized strings containing +non-BMP characters on Windows. 
diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 276547ca48a5b..826298c23a924 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -12356,20 +12356,38 @@ PyUnicode_IsIdentifier(PyObject *self) return len && i == len; } else { - Py_ssize_t i, len = PyUnicode_GET_SIZE(self); + Py_ssize_t i = 0, len = PyUnicode_GET_SIZE(self); if (len == 0) { /* an empty string is not a valid identifier */ return 0; } const wchar_t *wstr = _PyUnicode_WSTR(self); - Py_UCS4 ch = wstr[0]; + Py_UCS4 ch = wstr[i++]; +#if SIZEOF_WCHAR_T == 2 + if (Py_UNICODE_IS_HIGH_SURROGATE(ch) + && i < len + && Py_UNICODE_IS_LOW_SURROGATE(wstr[i])) + { + ch = Py_UNICODE_JOIN_SURROGATES(ch, wstr[i]); + i++; + } +#endif if (!_PyUnicode_IsXidStart(ch) && ch != 0x5F /* LOW LINE */) { return 0; } - for (i = 1; i < len; i++) { - ch = wstr[i]; + while (i < len) { + ch = wstr[i++]; +#if SIZEOF_WCHAR_T == 2 + if (Py_UNICODE_IS_HIGH_SURROGATE(ch) + && i < len + && Py_UNICODE_IS_LOW_SURROGATE(wstr[i])) + { + ch = Py_UNICODE_JOIN_SURROGATES(ch, wstr[i]); + i++; + } +#endif if (!_PyUnicode_IsXidContinue(ch)) { return 0; } From webhook-mailer at python.org Tue May 12 10:12:46 2020 From: webhook-mailer at python.org (scoder) Date: Tue, 12 May 2020 14:12:46 -0000 Subject: [Python-checkins] bpo-38787: Add PyCFunction_CheckExact() macro for exact type checks (GH-20024) Message-ID: https://github.com/python/cpython/commit/4c9ea093cd752a6687864674d34250653653f743 commit: 4c9ea093cd752a6687864674d34250653653f743 branch: master author: scoder committer: GitHub date: 2020-05-12T07:12:41-07:00 summary: bpo-38787: Add PyCFunction_CheckExact() macro for exact type checks (GH-20024) ? now that we allow subtypes of PyCFunction. Also add PyCMethod_CheckExact() and PyCMethod_Check() for checks against the PyCMethod subtype. 
files: A Misc/NEWS.d/next/C API/2020-05-10-16-39-08.bpo-38787.XzQ59O.rst M Include/cpython/methodobject.h M Include/methodobject.h M Objects/abstract.c M Python/ceval.c diff --git a/Include/cpython/methodobject.h b/Include/cpython/methodobject.h index 2ac2cbf36aa79..7ecbfe3b5e2fe 100644 --- a/Include/cpython/methodobject.h +++ b/Include/cpython/methodobject.h @@ -4,6 +4,9 @@ PyAPI_DATA(PyTypeObject) PyCMethod_Type; +#define PyCMethod_CheckExact(op) Py_IS_TYPE(op, &PyCMethod_Type) +#define PyCMethod_Check(op) PyObject_TypeCheck(op, &PyCMethod_Type) + /* Macros for direct access to these values. Type checks are *not* done, so use with care. */ #define PyCFunction_GET_FUNCTION(func) \ diff --git a/Include/methodobject.h b/Include/methodobject.h index 7c7362cded35b..12e049b4043ba 100644 --- a/Include/methodobject.h +++ b/Include/methodobject.h @@ -13,7 +13,8 @@ extern "C" { PyAPI_DATA(PyTypeObject) PyCFunction_Type; -#define PyCFunction_Check(op) (Py_IS_TYPE(op, &PyCFunction_Type) || (PyType_IsSubtype(Py_TYPE(op), &PyCFunction_Type))) +#define PyCFunction_CheckExact(op) Py_IS_TYPE(op, &PyCFunction_Type) +#define PyCFunction_Check(op) PyObject_TypeCheck(op, &PyCFunction_Type) typedef PyObject *(*PyCFunction)(PyObject *, PyObject *); typedef PyObject *(*_PyCFunctionFast) (PyObject *, PyObject *const *, Py_ssize_t); diff --git a/Misc/NEWS.d/next/C API/2020-05-10-16-39-08.bpo-38787.XzQ59O.rst b/Misc/NEWS.d/next/C API/2020-05-10-16-39-08.bpo-38787.XzQ59O.rst new file mode 100644 index 0000000000000..f80be666c1c20 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2020-05-10-16-39-08.bpo-38787.XzQ59O.rst @@ -0,0 +1,2 @@ +Add PyCFunction_CheckExact() macro for exact type checks now that we allow subtypes of PyCFunction, +as well as PyCMethod_CheckExact() and PyCMethod_Check() for the new PyCMethod subtype. 
diff --git a/Objects/abstract.c b/Objects/abstract.c index b014f79e8d0fb..5b85b014bd22e 100644 --- a/Objects/abstract.c +++ b/Objects/abstract.c @@ -900,7 +900,7 @@ binary_op(PyObject *v, PyObject *w, const int op_slot, const char *op_name) Py_DECREF(result); if (op_slot == NB_SLOT(nb_rshift) && - PyCFunction_Check(v) && + PyCFunction_CheckExact(v) && strcmp(((PyCFunctionObject *)v)->m_ml->ml_name, "print") == 0) { PyErr_Format(PyExc_TypeError, diff --git a/Python/ceval.c b/Python/ceval.c index e54e344a5fd51..699ad86a365b1 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -5054,7 +5054,7 @@ trace_call_function(PyThreadState *tstate, PyObject *kwnames) { PyObject *x; - if (PyCFunction_Check(func)) { + if (PyCFunction_CheckExact(func) || PyCMethod_CheckExact(func)) { C_TRACE(x, PyObject_Vectorcall(func, args, nargs, kwnames)); return x; } @@ -5115,7 +5115,7 @@ do_call_core(PyThreadState *tstate, PyObject *func, PyObject *callargs, PyObject { PyObject *result; - if (PyCFunction_Check(func)) { + if (PyCFunction_CheckExact(func) || PyCMethod_CheckExact(func)) { C_TRACE(result, PyObject_Call(func, callargs, kwdict)); return result; } From webhook-mailer at python.org Tue May 12 12:46:28 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 12 May 2020 16:46:28 -0000 Subject: [Python-checkins] bpo-40602: Add _Py_HashPointerRaw() function (GH-20056) Message-ID: https://github.com/python/cpython/commit/f453221c8b80e0570066a9375337f208d50e6406 commit: f453221c8b80e0570066a9375337f208d50e6406 branch: master author: Victor Stinner committer: GitHub date: 2020-05-12T18:46:20+02:00 summary: bpo-40602: Add _Py_HashPointerRaw() function (GH-20056) Add a new _Py_HashPointerRaw() function which avoids replacing -1 with -2 to micro-optimize hash table using pointer keys: using _Py_hashtable_hash_ptr() hash function. 
files: M Include/pyhash.h M Python/hashtable.c M Python/pyhash.c diff --git a/Include/pyhash.h b/Include/pyhash.h index 2f398589cee7e..4437b870332bd 100644 --- a/Include/pyhash.h +++ b/Include/pyhash.h @@ -9,6 +9,8 @@ extern "C" { #ifndef Py_LIMITED_API PyAPI_FUNC(Py_hash_t) _Py_HashDouble(double); PyAPI_FUNC(Py_hash_t) _Py_HashPointer(const void*); +// Similar to _Py_HashPointer(), but don't replace -1 with -2 +PyAPI_FUNC(Py_hash_t) _Py_HashPointerRaw(const void*); PyAPI_FUNC(Py_hash_t) _Py_HashBytes(const void*, Py_ssize_t); #endif diff --git a/Python/hashtable.c b/Python/hashtable.c index 1548c2e4618c3..90fe34e628016 100644 --- a/Python/hashtable.c +++ b/Python/hashtable.c @@ -109,7 +109,7 @@ _Py_hashtable_hash_ptr(struct _Py_hashtable_t *ht, const void *pkey) { void *key; _Py_HASHTABLE_READ_KEY(ht, pkey, key); - return (Py_uhash_t)_Py_HashPointer(key); + return (Py_uhash_t)_Py_HashPointerRaw(key); } diff --git a/Python/pyhash.c b/Python/pyhash.c index a6f42e71cf643..3843079fbbce1 100644 --- a/Python/pyhash.c +++ b/Python/pyhash.c @@ -129,16 +129,22 @@ _Py_HashDouble(double v) } Py_hash_t -_Py_HashPointer(const void *p) +_Py_HashPointerRaw(const void *p) { - Py_hash_t x; size_t y = (size_t)p; /* bottom 3 or 4 bits are likely to be 0; rotate y by 4 to avoid excessive hash collisions for dicts and sets */ y = (y >> 4) | (y << (8 * SIZEOF_VOID_P - 4)); - x = (Py_hash_t)y; - if (x == -1) + return (Py_hash_t)y; +} + +Py_hash_t +_Py_HashPointer(const void *p) +{ + Py_hash_t x = _Py_HashPointerRaw(p); + if (x == -1) { x = -2; + } return x; } From webhook-mailer at python.org Tue May 12 18:32:50 2020 From: webhook-mailer at python.org (Steve Dower) Date: Tue, 12 May 2020 22:32:50 -0000 Subject: [Python-checkins] bpo-40501: Replace ctypes code in uuid with native module (GH-19948) Message-ID: https://github.com/python/cpython/commit/d6b727e2c947240804b8e434b305ba2890122550 commit: d6b727e2c947240804b8e434b305ba2890122550 branch: master author: Steve Dower committer: 
GitHub date: 2020-05-12T23:32:32+01:00 summary: bpo-40501: Replace ctypes code in uuid with native module (GH-19948) files: A Misc/NEWS.d/next/Security/2020-05-06-00-41-11.bpo-40501._61wv_.rst A PCbuild/_uuid.vcxproj A PCbuild/_uuid.vcxproj.filters M Lib/test/test_uuid.py M Lib/uuid.py M Modules/_uuidmodule.c M PCbuild/pcbuild.proj M PCbuild/pcbuild.sln M Tools/msi/lib/lib_files.wxs diff --git a/Lib/test/test_uuid.py b/Lib/test/test_uuid.py index ac166ced38afb..b1c92427dd270 100644 --- a/Lib/test/test_uuid.py +++ b/Lib/test/test_uuid.py @@ -852,17 +852,6 @@ def test_netstat_getnode(self): node = self.uuid._netstat_getnode() self.check_node(node, 'netstat') - @unittest.skipUnless(os.name == 'nt', 'requires Windows') - def test_ipconfig_getnode(self): - node = self.uuid._ipconfig_getnode() - self.check_node(node, 'ipconfig') - - @unittest.skipUnless(importable('win32wnet'), 'requires win32wnet') - @unittest.skipUnless(importable('netbios'), 'requires netbios') - def test_netbios_getnode(self): - node = self.uuid._netbios_getnode() - self.check_node(node) - def test_random_getnode(self): node = self.uuid._random_getnode() # The multicast bit, i.e. 
the least significant bit of first octet, @@ -874,6 +863,13 @@ def test_random_getnode(self): node2 = self.uuid._random_getnode() self.assertNotEqual(node2, node, '%012x' % node) +class TestInternalsWithoutExtModule(BaseTestInternals, unittest.TestCase): + uuid = py_uuid + + at unittest.skipUnless(c_uuid, 'requires the C _uuid module') +class TestInternalsWithExtModule(BaseTestInternals, unittest.TestCase): + uuid = c_uuid + @unittest.skipUnless(os.name == 'posix', 'requires Posix') def test_unix_getnode(self): if not importable('_uuid') and not importable('ctypes'): @@ -885,19 +881,10 @@ def test_unix_getnode(self): self.check_node(node, 'unix') @unittest.skipUnless(os.name == 'nt', 'requires Windows') - @unittest.skipUnless(importable('ctypes'), 'requires ctypes') def test_windll_getnode(self): node = self.uuid._windll_getnode() self.check_node(node) -class TestInternalsWithoutExtModule(BaseTestInternals, unittest.TestCase): - uuid = py_uuid - - at unittest.skipUnless(c_uuid, 'requires the C _uuid module') -class TestInternalsWithExtModule(BaseTestInternals, unittest.TestCase): - uuid = c_uuid - - if __name__ == '__main__': unittest.main() diff --git a/Lib/uuid.py b/Lib/uuid.py index 2799c75ba6a1a..9ddce813fc469 100644 --- a/Lib/uuid.py +++ b/Lib/uuid.py @@ -555,178 +555,44 @@ def _netstat_getnode(): return _find_mac_under_heading('netstat', '-ian', b'Address') def _ipconfig_getnode(): - """Get the hardware address on Windows by running ipconfig.exe.""" - import os, re, subprocess - first_local_mac = None - dirs = ['', r'c:\windows\system32', r'c:\winnt\system32'] - try: - import ctypes - buffer = ctypes.create_string_buffer(300) - ctypes.windll.kernel32.GetSystemDirectoryA(buffer, 300) - dirs.insert(0, buffer.value.decode('mbcs')) - except: - pass - for dir in dirs: - try: - proc = subprocess.Popen([os.path.join(dir, 'ipconfig'), '/all'], - stdout=subprocess.PIPE, - encoding="oem") - except OSError: - continue - with proc: - for line in proc.stdout: - value = 
line.split(':')[-1].strip().lower() - if re.fullmatch('(?:[0-9a-f][0-9a-f]-){5}[0-9a-f][0-9a-f]', value): - mac = int(value.replace('-', ''), 16) - if _is_universal(mac): - return mac - first_local_mac = first_local_mac or mac - return first_local_mac or None + """[DEPRECATED] Get the hardware address on Windows.""" + # bpo-40501: UuidCreateSequential() is now the only supported approach + return _windll_getnode() def _netbios_getnode(): - """Get the hardware address on Windows using NetBIOS calls. - See http://support.microsoft.com/kb/118623 for details.""" - import win32wnet, netbios - first_local_mac = None - ncb = netbios.NCB() - ncb.Command = netbios.NCBENUM - ncb.Buffer = adapters = netbios.LANA_ENUM() - adapters._pack() - if win32wnet.Netbios(ncb) != 0: - return None - adapters._unpack() - for i in range(adapters.length): - ncb.Reset() - ncb.Command = netbios.NCBRESET - ncb.Lana_num = ord(adapters.lana[i]) - if win32wnet.Netbios(ncb) != 0: - continue - ncb.Reset() - ncb.Command = netbios.NCBASTAT - ncb.Lana_num = ord(adapters.lana[i]) - ncb.Callname = '*'.ljust(16) - ncb.Buffer = status = netbios.ADAPTER_STATUS() - if win32wnet.Netbios(ncb) != 0: - continue - status._unpack() - bytes = status.adapter_address[:6] - if len(bytes) != 6: - continue - mac = int.from_bytes(bytes, 'big') - if _is_universal(mac): - return mac - first_local_mac = first_local_mac or mac - return first_local_mac or None + """[DEPRECATED] Get the hardware address on Windows.""" + # bpo-40501: UuidCreateSequential() is now the only supported approach + return _windll_getnode() -_generate_time_safe = _UuidCreate = None -_has_uuid_generate_time_safe = None - # Import optional C extension at toplevel, to help disabling it when testing try: import _uuid + _generate_time_safe = getattr(_uuid, "generate_time_safe", None) + _UuidCreate = getattr(_uuid, "UuidCreate", None) + _has_uuid_generate_time_safe = _uuid.has_uuid_generate_time_safe except ImportError: _uuid = None + _generate_time_safe = 
None + _UuidCreate = None + _has_uuid_generate_time_safe = None def _load_system_functions(): - """ - Try to load platform-specific functions for generating uuids. - """ - global _generate_time_safe, _UuidCreate, _has_uuid_generate_time_safe - - if _has_uuid_generate_time_safe is not None: - return - - _has_uuid_generate_time_safe = False - - if sys.platform == "darwin" and int(os.uname().release.split('.')[0]) < 9: - # The uuid_generate_* functions are broken on MacOS X 10.5, as noted - # in issue #8621 the function generates the same sequence of values - # in the parent process and all children created using fork (unless - # those children use exec as well). - # - # Assume that the uuid_generate functions are broken from 10.5 onward, - # the test can be adjusted when a later version is fixed. - pass - elif _uuid is not None: - _generate_time_safe = _uuid.generate_time_safe - _has_uuid_generate_time_safe = _uuid.has_uuid_generate_time_safe - return - - try: - # If we couldn't find an extension module, try ctypes to find - # system routines for UUID generation. - # Thanks to Thomas Heller for ctypes and for his help with its use here. - import ctypes - import ctypes.util - - # The uuid_generate_* routines are provided by libuuid on at least - # Linux and FreeBSD, and provided by libc on Mac OS X. - _libnames = ['uuid'] - if not sys.platform.startswith('win'): - _libnames.append('c') - for libname in _libnames: - try: - lib = ctypes.CDLL(ctypes.util.find_library(libname)) - except Exception: # pragma: nocover - continue - # Try to find the safe variety first. 
- if hasattr(lib, 'uuid_generate_time_safe'): - _uuid_generate_time_safe = lib.uuid_generate_time_safe - # int uuid_generate_time_safe(uuid_t out); - def _generate_time_safe(): - _buffer = ctypes.create_string_buffer(16) - res = _uuid_generate_time_safe(_buffer) - return bytes(_buffer.raw), res - _has_uuid_generate_time_safe = True - break - - elif hasattr(lib, 'uuid_generate_time'): # pragma: nocover - _uuid_generate_time = lib.uuid_generate_time - # void uuid_generate_time(uuid_t out); - _uuid_generate_time.restype = None - def _generate_time_safe(): - _buffer = ctypes.create_string_buffer(16) - _uuid_generate_time(_buffer) - return bytes(_buffer.raw), None - break - - # On Windows prior to 2000, UuidCreate gives a UUID containing the - # hardware address. On Windows 2000 and later, UuidCreate makes a - # random UUID and UuidCreateSequential gives a UUID containing the - # hardware address. These routines are provided by the RPC runtime. - # NOTE: at least on Tim's WinXP Pro SP2 desktop box, while the last - # 6 bytes returned by UuidCreateSequential are fixed, they don't appear - # to bear any relationship to the MAC address of any network device - # on the box. 
- try: - lib = ctypes.windll.rpcrt4 - except: - lib = None - _UuidCreate = getattr(lib, 'UuidCreateSequential', - getattr(lib, 'UuidCreate', None)) - - except Exception as exc: - import warnings - warnings.warn(f"Could not find fallback ctypes uuid functions: {exc}", - ImportWarning) + """[DEPRECATED] Platform-specific functions loaded at import time""" def _unix_getnode(): - """Get the hardware address on Unix using the _uuid extension module - or ctypes.""" - _load_system_functions() - uuid_time, _ = _generate_time_safe() - return UUID(bytes=uuid_time).node + """Get the hardware address on Unix using the _uuid extension module.""" + if _generate_time_safe: + uuid_time, _ = _generate_time_safe() + return UUID(bytes=uuid_time).node def _windll_getnode(): - """Get the hardware address on Windows using ctypes.""" - import ctypes - _load_system_functions() - _buffer = ctypes.create_string_buffer(16) - if _UuidCreate(_buffer) == 0: - return UUID(bytes=bytes_(_buffer.raw)).node + """Get the hardware address on Windows using the _uuid extension module.""" + if _UuidCreate: + uuid_bytes = _UuidCreate() + return UUID(bytes_le=uuid_bytes).node def _random_getnode(): """Get a random node ID.""" @@ -755,7 +621,8 @@ def _random_getnode(): elif _DARWIN: _OS_GETTERS = [_ifconfig_getnode, _arp_getnode, _netstat_getnode] elif _WINDOWS: - _OS_GETTERS = [_netbios_getnode, _ipconfig_getnode] + # bpo-40201: _windll_getnode will always succeed, so these are not needed + _OS_GETTERS = [] elif _AIX: _OS_GETTERS = [_netstat_getnode] else: @@ -802,7 +669,6 @@ def uuid1(node=None, clock_seq=None): # When the system provides a version-1 UUID generator, use it (but don't # use UuidCreate here because its UUIDs don't conform to RFC 4122). 
- _load_system_functions() if _generate_time_safe is not None and node is clock_seq is None: uuid_time, safely_generated = _generate_time_safe() try: diff --git a/Misc/NEWS.d/next/Security/2020-05-06-00-41-11.bpo-40501._61wv_.rst b/Misc/NEWS.d/next/Security/2020-05-06-00-41-11.bpo-40501._61wv_.rst new file mode 100644 index 0000000000000..5ce22eb8a92ee --- /dev/null +++ b/Misc/NEWS.d/next/Security/2020-05-06-00-41-11.bpo-40501._61wv_.rst @@ -0,0 +1,2 @@ +:mod:`uuid` no longer uses :mod:`ctypes` to load :file:`libuuid` or +:file:`rpcrt4.dll` at runtime. diff --git a/Modules/_uuidmodule.c b/Modules/_uuidmodule.c index 3be6c848ad645..3f33e22a055c6 100644 --- a/Modules/_uuidmodule.c +++ b/Modules/_uuidmodule.c @@ -1,5 +1,5 @@ /* - * Python UUID module that wraps libuuid - + * Python UUID module that wraps libuuid or Windows rpcrt4.dll. * DCE compatible Universally Unique Identifier library. */ @@ -12,6 +12,12 @@ #include #endif +#ifdef MS_WINDOWS +#include +#endif + +#ifndef MS_WINDOWS + static PyObject * py_uuid_generate_time_safe(PyObject *Py_UNUSED(context), PyObject *Py_UNUSED(ignored)) @@ -31,17 +37,50 @@ py_uuid_generate_time_safe(PyObject *Py_UNUSED(context), return Py_BuildValue("y#i", buf, sizeof(uuid), (int) status); # else return Py_BuildValue("y#i", (const char *) &uuid, sizeof(uuid), (int) status); -# endif -#else +# endif /* HAVE_UUID_CREATE */ +#else /* HAVE_UUID_GENERATE_TIME_SAFE */ uuid_generate_time(uuid); return Py_BuildValue("y#O", (const char *) uuid, sizeof(uuid), Py_None); -#endif +#endif /* HAVE_UUID_GENERATE_TIME_SAFE */ } +#else /* MS_WINDOWS */ + +static PyObject * +py_UuidCreate(PyObject *Py_UNUSED(context), + PyObject *Py_UNUSED(ignored)) +{ + UUID uuid; + RPC_STATUS res; + + Py_BEGIN_ALLOW_THREADS + res = UuidCreateSequential(&uuid); + Py_END_ALLOW_THREADS + + switch (res) { + case RPC_S_OK: + case RPC_S_UUID_LOCAL_ONLY: + case RPC_S_UUID_NO_ADDRESS: + /* + All success codes, but the latter two indicate that the UUID is random + rather 
than based on the MAC address. If the OS can't figure this out, + neither can we, so we'll take it anyway. + */ + return Py_BuildValue("y#", (const char *)&uuid, sizeof(uuid)); + } + PyErr_SetFromWindowsErr(res); + return NULL; +} + +#endif /* MS_WINDOWS */ + + static int uuid_exec(PyObject *module) { assert(sizeof(uuid_t) == 16); -#ifdef HAVE_UUID_GENERATE_TIME_SAFE +#if defined(MS_WINDOWS) + int has_uuid_generate_time_safe = 0; +#elif defined(HAVE_UUID_GENERATE_TIME_SAFE) int has_uuid_generate_time_safe = 1; #else int has_uuid_generate_time_safe = 0; @@ -54,7 +93,12 @@ uuid_exec(PyObject *module) { } static PyMethodDef uuid_methods[] = { +#if defined(HAVE_UUID_UUID_H) || defined(HAVE_UUID_H) {"generate_time_safe", py_uuid_generate_time_safe, METH_NOARGS, NULL}, +#endif +#if defined(MS_WINDOWS) + {"UuidCreate", py_UuidCreate, METH_NOARGS, NULL}, +#endif {NULL, NULL, 0, NULL} /* sentinel */ }; diff --git a/PCbuild/_uuid.vcxproj b/PCbuild/_uuid.vcxproj new file mode 100644 index 0000000000000..2437b7eb2d939 --- /dev/null +++ b/PCbuild/_uuid.vcxproj @@ -0,0 +1,115 @@ +? + + + + Debug + ARM + + + Debug + ARM64 + + + Debug + Win32 + + + Debug + x64 + + + PGInstrument + ARM + + + PGInstrument + ARM64 + + + PGInstrument + Win32 + + + PGInstrument + x64 + + + PGUpdate + ARM + + + PGUpdate + ARM64 + + + PGUpdate + Win32 + + + PGUpdate + x64 + + + Release + ARM + + + Release + ARM64 + + + Release + Win32 + + + Release + x64 + + + + {CB435430-EBB1-478B-8F4E-C256F6838F55} + _uuid + Win32Proj + false + + + + + DynamicLibrary + NotSet + + + + .pyd + + + + + + + + + + <_ProjectFileVersion>10.0.30319.1 + + + + rpcrt4.lib;%(AdditionalDependencies) + + + + + + + + + + + {cf7ac3d1-e2df-41d2-bea6-1e2556cdea26} + false + + + + + + \ No newline at end of file diff --git a/PCbuild/_uuid.vcxproj.filters b/PCbuild/_uuid.vcxproj.filters new file mode 100644 index 0000000000000..1794929231434 --- /dev/null +++ b/PCbuild/_uuid.vcxproj.filters @@ -0,0 +1,14 @@ +? 
+ + + + {4FC737F1-C7A5-4376-A066-2A32D752A2FF} + cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx + + + + + Source Files + + + \ No newline at end of file diff --git a/PCbuild/pcbuild.proj b/PCbuild/pcbuild.proj index 22a9eed18d42b..9c4d352b43448 100644 --- a/PCbuild/pcbuild.proj +++ b/PCbuild/pcbuild.proj @@ -51,7 +51,7 @@ - + diff --git a/PCbuild/pcbuild.sln b/PCbuild/pcbuild.sln index 6dc0139bc42af..6d4c9506e5ec1 100644 --- a/PCbuild/pcbuild.sln +++ b/PCbuild/pcbuild.sln @@ -1,6 +1,6 @@ Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio 15 -VisualStudioVersion = 15.0.27130.2024 +# Visual Studio Version 16 +VisualStudioVersion = 16.0.30028.174 MinimumVisualStudioVersion = 10.0.40219.1 Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{553EC33E-9816-4996-A660-5D6186A0B0B3}" ProjectSection(SolutionItems) = preProject @@ -103,6 +103,8 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "venvwlauncher", "venvwlaunc EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "pythonw_uwp", "pythonw_uwp.vcxproj", "{AB603547-1E2A-45B3-9E09-B04596006393}" EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_uuid", "_uuid.vcxproj", "{CB435430-EBB1-478B-8F4E-C256F6838F55}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|ARM = Debug|ARM @@ -1440,6 +1442,38 @@ Global {AB603547-1E2A-45B3-9E09-B04596006393}.Release|Win32.Build.0 = Release|Win32 {AB603547-1E2A-45B3-9E09-B04596006393}.Release|x64.ActiveCfg = Release|x64 {AB603547-1E2A-45B3-9E09-B04596006393}.Release|x64.Build.0 = Release|x64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Debug|ARM.ActiveCfg = Debug|ARM + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Debug|ARM.Build.0 = Debug|ARM + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Debug|ARM64.ActiveCfg = Debug|ARM64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Debug|ARM64.Build.0 = Debug|ARM64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Debug|Win32.ActiveCfg = Debug|Win32 + 
{CB435430-EBB1-478B-8F4E-C256F6838F55}.Debug|Win32.Build.0 = Debug|Win32 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Debug|x64.ActiveCfg = Debug|x64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Debug|x64.Build.0 = Debug|x64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGInstrument|ARM.ActiveCfg = PGInstrument|ARM + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGInstrument|ARM.Build.0 = PGInstrument|ARM + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGInstrument|ARM64.ActiveCfg = PGInstrument|ARM64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGInstrument|ARM64.Build.0 = PGInstrument|ARM64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGInstrument|Win32.ActiveCfg = PGInstrument|Win32 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGInstrument|Win32.Build.0 = PGInstrument|Win32 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGInstrument|x64.ActiveCfg = PGInstrument|x64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGInstrument|x64.Build.0 = PGInstrument|x64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGUpdate|ARM.ActiveCfg = PGUpdate|ARM + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGUpdate|ARM.Build.0 = PGUpdate|ARM + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGUpdate|ARM64.ActiveCfg = PGUpdate|ARM64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGUpdate|ARM64.Build.0 = PGUpdate|ARM64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGUpdate|Win32.ActiveCfg = PGUpdate|Win32 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGUpdate|Win32.Build.0 = PGUpdate|Win32 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGUpdate|x64.ActiveCfg = PGUpdate|x64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGUpdate|x64.Build.0 = PGUpdate|x64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Release|ARM.ActiveCfg = Release|ARM + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Release|ARM.Build.0 = Release|ARM + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Release|ARM64.ActiveCfg = Release|ARM64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Release|ARM64.Build.0 = Release|ARM64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Release|Win32.ActiveCfg = Release|Win32 + 
{CB435430-EBB1-478B-8F4E-C256F6838F55}.Release|Win32.Build.0 = Release|Win32 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Release|x64.ActiveCfg = Release|x64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Release|x64.Build.0 = Release|x64 EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE diff --git a/Tools/msi/lib/lib_files.wxs b/Tools/msi/lib/lib_files.wxs index b462372512f6d..95541599b9bb2 100644 --- a/Tools/msi/lib/lib_files.wxs +++ b/Tools/msi/lib/lib_files.wxs @@ -1,6 +1,6 @@ ? - + From webhook-mailer at python.org Tue May 12 18:54:23 2020 From: webhook-mailer at python.org (Allen Guo) Date: Tue, 12 May 2020 22:54:23 -0000 Subject: [Python-checkins] Fix Wikipedia link (GH-20031) Message-ID: https://github.com/python/cpython/commit/3d54211e6eddc2f2586b9a20543754947c7ad325 commit: 3d54211e6eddc2f2586b9a20543754947c7ad325 branch: master author: Allen Guo committer: GitHub date: 2020-05-12T19:54:18-03:00 summary: Fix Wikipedia link (GH-20031) files: M Doc/library/functools.rst diff --git a/Doc/library/functools.rst b/Doc/library/functools.rst index 204e66ae5ac40..a44eb85b27dba 100644 --- a/Doc/library/functools.rst +++ b/Doc/library/functools.rst @@ -158,11 +158,11 @@ The :mod:`functools` module defines the following functions: bypassing the cache, or for rewrapping the function with a different cache. An `LRU (least recently used) cache - `_ works - best when the most recent calls are the best predictors of upcoming calls (for - example, the most popular articles on a news server tend to change each day). - The cache's size limit assures that the cache does not grow without bound on - long-running processes such as web servers. + `_ + works best when the most recent calls are the best predictors of upcoming + calls (for example, the most popular articles on a news server tend to + change each day). The cache's size limit assures that the cache does not + grow without bound on long-running processes such as web servers. 
In general, the LRU cache should only be used when you want to reuse previously computed values. Accordingly, it doesn't make sense to cache From webhook-mailer at python.org Tue May 12 19:37:08 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 12 May 2020 23:37:08 -0000 Subject: [Python-checkins] bpo-40609: Rewrite how _tracemalloc handles domains (GH-20059) Message-ID: https://github.com/python/cpython/commit/9e2ca1742076169089b818d0883688a2ddd9964a commit: 9e2ca1742076169089b818d0883688a2ddd9964a branch: master author: Victor Stinner committer: GitHub date: 2020-05-13T01:36:47+02:00 summary: bpo-40609: Rewrite how _tracemalloc handles domains (GH-20059) Rewrite how the _tracemalloc module stores traces of other domains. Rather than storing the domain inside the key, it now uses a new hash table with the domain as the key, and the data is a per-domain traces hash table. * Add tracemalloc_domain hash table. * Remove _Py_tracemalloc_config.use_domain. * Remove pointer_t and related functions. files: M Include/internal/pycore_pymem.h M Modules/_tracemalloc.c diff --git a/Include/internal/pycore_pymem.h b/Include/internal/pycore_pymem.h index 18203e30f5cfe..3d925e2250d25 100644 --- a/Include/internal/pycore_pymem.h +++ b/Include/internal/pycore_pymem.h @@ -88,17 +88,12 @@ struct _PyTraceMalloc_Config { /* limit of the number of frames in a traceback, 1 by default. Variable protected by the GIL. */ int max_nframe; - - /* use domain in trace key? - Variable protected by the GIL. 
*/ - int use_domain; }; #define _PyTraceMalloc_Config_INIT \ {.initialized = TRACEMALLOC_NOT_INITIALIZED, \ .tracing = 0, \ - .max_nframe = 1, \ - .use_domain = 0} + .max_nframe = 1} PyAPI_DATA(struct _PyTraceMalloc_Config) _Py_tracemalloc_config; diff --git a/Modules/_tracemalloc.c b/Modules/_tracemalloc.c index f22338166d0dc..7e31abe05fb6b 100644 --- a/Modules/_tracemalloc.c +++ b/Modules/_tracemalloc.c @@ -47,16 +47,6 @@ static PyThread_type_lock tables_lock; #define DEFAULT_DOMAIN 0 -/* Pack the frame_t structure to reduce the memory footprint. */ -typedef struct -#ifdef __GNUC__ -__attribute__((packed)) -#endif -{ - uintptr_t ptr; - unsigned int domain; -} pointer_t; - /* Pack the frame_t structure to reduce the memory footprint on 64-bit architectures: 12 bytes instead of 16. */ typedef struct @@ -133,6 +123,10 @@ static _Py_hashtable_t *tracemalloc_tracebacks = NULL; Protected by TABLES_LOCK(). */ static _Py_hashtable_t *tracemalloc_traces = NULL; +/* domain (unsigned int) => traces (_Py_hashtable_t). + Protected by TABLES_LOCK(). 
*/ +static _Py_hashtable_t *tracemalloc_domains = NULL; + #ifdef TRACE_DEBUG static void @@ -235,32 +229,11 @@ hashtable_compare_unicode(_Py_hashtable_t *ht, const void *pkey, static Py_uhash_t -hashtable_hash_pointer_t(_Py_hashtable_t *ht, const void *pkey) +hashtable_hash_uint(_Py_hashtable_t *ht, const void *pkey) { - pointer_t ptr; - Py_uhash_t hash; - - _Py_HASHTABLE_READ_KEY(ht, pkey, ptr); - - hash = (Py_uhash_t)_Py_HashPointer((void*)ptr.ptr); - hash ^= ptr.domain; - return hash; -} - - -static int -hashtable_compare_pointer_t(_Py_hashtable_t *ht, const void *pkey, - const _Py_hashtable_entry_t *entry) -{ - pointer_t ptr1, ptr2; - - _Py_HASHTABLE_READ_KEY(ht, pkey, ptr1); - _Py_HASHTABLE_ENTRY_READ_KEY(ht, entry, ptr2); - - /* compare pointer before domain, because pointer is more likely to be - different */ - return (ptr1.ptr == ptr2.ptr && ptr1.domain == ptr2.domain); - + unsigned int key; + _Py_HASHTABLE_READ_KEY(ht, pkey, key); + return (Py_uhash_t)key; } @@ -501,77 +474,74 @@ traceback_new(void) } -static int -tracemalloc_use_domain_cb(_Py_hashtable_t *old_traces, - _Py_hashtable_entry_t *entry, void *user_data) +static _Py_hashtable_t* +tracemalloc_create_traces_table(void) { - uintptr_t ptr; - pointer_t key; - _Py_hashtable_t *new_traces = (_Py_hashtable_t *)user_data; - const void *pdata = _Py_HASHTABLE_ENTRY_PDATA(old_traces, entry); + return hashtable_new(sizeof(uintptr_t), + sizeof(trace_t), + _Py_hashtable_hash_ptr, + _Py_hashtable_compare_direct); +} - _Py_HASHTABLE_ENTRY_READ_KEY(old_traces, entry, ptr); - key.ptr = ptr; - key.domain = DEFAULT_DOMAIN; - return _Py_hashtable_set(new_traces, - sizeof(key), &key, - old_traces->data_size, pdata); +static _Py_hashtable_t* +tracemalloc_create_domains_table(void) +{ + return hashtable_new(sizeof(unsigned int), + sizeof(_Py_hashtable_t *), + hashtable_hash_uint, + _Py_hashtable_compare_direct); } -/* Convert tracemalloc_traces from compact key (uintptr_t) to pointer_t key. 
- * Return 0 on success, -1 on error. */ static int -tracemalloc_use_domain(void) +tracemalloc_destroy_domains_cb(_Py_hashtable_t *domains, + _Py_hashtable_entry_t *entry, + void *user_data) { - _Py_hashtable_t *new_traces = NULL; - - assert(!_Py_tracemalloc_config.use_domain); - - new_traces = hashtable_new(sizeof(pointer_t), - sizeof(trace_t), - hashtable_hash_pointer_t, - hashtable_compare_pointer_t); - if (new_traces == NULL) { - return -1; - } + _Py_hashtable_t *traces; + _Py_HASHTABLE_ENTRY_READ_DATA(domains, entry, traces); + _Py_hashtable_destroy(traces); + return 0; +} - if (_Py_hashtable_foreach(tracemalloc_traces, tracemalloc_use_domain_cb, - new_traces) < 0) - { - _Py_hashtable_destroy(new_traces); - return -1; - } - _Py_hashtable_destroy(tracemalloc_traces); - tracemalloc_traces = new_traces; +static void +tracemalloc_destroy_domains(_Py_hashtable_t *domains) +{ + _Py_hashtable_foreach(domains, tracemalloc_destroy_domains_cb, NULL); + _Py_hashtable_destroy(domains); +} - _Py_tracemalloc_config.use_domain = 1; - return 0; +static _Py_hashtable_t* +tracemalloc_get_traces_table(unsigned int domain) +{ + if (domain == DEFAULT_DOMAIN) { + return tracemalloc_traces; + } + else { + _Py_hashtable_t *traces = NULL; + (void)_Py_HASHTABLE_GET(tracemalloc_domains, domain, traces); + return traces; + } } static void tracemalloc_remove_trace(unsigned int domain, uintptr_t ptr) { - trace_t trace; - int removed; - assert(_Py_tracemalloc_config.tracing); - if (_Py_tracemalloc_config.use_domain) { - pointer_t key = {ptr, domain}; - removed = _Py_HASHTABLE_POP(tracemalloc_traces, key, trace); - } - else { - removed = _Py_HASHTABLE_POP(tracemalloc_traces, ptr, trace); - } - if (!removed) { + _Py_hashtable_t *traces = tracemalloc_get_traces_table(domain); + if (!traces) { return; } + trace_t trace; + if (!_Py_HASHTABLE_POP(traces, ptr, trace)) { + return; + } assert(tracemalloc_traced_memory >= trace.size); tracemalloc_traced_memory -= trace.size; } @@ -584,54 +554,43 @@ 
static int tracemalloc_add_trace(unsigned int domain, uintptr_t ptr, size_t size) { - pointer_t key = {ptr, domain}; - traceback_t *traceback; - trace_t trace; - _Py_hashtable_entry_t* entry; - int res; - assert(_Py_tracemalloc_config.tracing); - traceback = traceback_new(); + traceback_t *traceback = traceback_new(); if (traceback == NULL) { return -1; } - if (!_Py_tracemalloc_config.use_domain && domain != DEFAULT_DOMAIN) { - /* first trace using a non-zero domain whereas traces use compact - (uintptr_t) keys: switch to pointer_t keys. */ - if (tracemalloc_use_domain() < 0) { + _Py_hashtable_t *traces = tracemalloc_get_traces_table(domain); + if (traces == NULL) { + traces = tracemalloc_create_traces_table(); + if (traces == NULL) { return -1; } - } - if (_Py_tracemalloc_config.use_domain) { - entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_traces, key); - } - else { - entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_traces, ptr); + if (_Py_HASHTABLE_SET(tracemalloc_domains, domain, traces) < 0) { + _Py_hashtable_destroy(traces); + return -1; + } } + _Py_hashtable_entry_t* entry = _Py_HASHTABLE_GET_ENTRY(traces, ptr); + trace_t trace; if (entry != NULL) { /* the memory block is already tracked */ - _Py_HASHTABLE_ENTRY_READ_DATA(tracemalloc_traces, entry, trace); + _Py_HASHTABLE_ENTRY_READ_DATA(traces, entry, trace); assert(tracemalloc_traced_memory >= trace.size); tracemalloc_traced_memory -= trace.size; trace.size = size; trace.traceback = traceback; - _Py_HASHTABLE_ENTRY_WRITE_DATA(tracemalloc_traces, entry, trace); + _Py_HASHTABLE_ENTRY_WRITE_DATA(traces, entry, trace); } else { trace.size = size; trace.traceback = traceback; - if (_Py_tracemalloc_config.use_domain) { - res = _Py_HASHTABLE_SET(tracemalloc_traces, key, trace); - } - else { - res = _Py_HASHTABLE_SET(tracemalloc_traces, ptr, trace); - } + int res = _Py_HASHTABLE_SET(traces, ptr, trace); if (res != 0) { return res; } @@ -639,8 +598,9 @@ tracemalloc_add_trace(unsigned int domain, uintptr_t ptr, 
assert(tracemalloc_traced_memory <= SIZE_MAX - size); tracemalloc_traced_memory += size; - if (tracemalloc_traced_memory > tracemalloc_peak_traced_memory) + if (tracemalloc_traced_memory > tracemalloc_peak_traced_memory) { tracemalloc_peak_traced_memory = tracemalloc_traced_memory; + } return 0; } @@ -691,7 +651,7 @@ tracemalloc_realloc(void *ctx, void *ptr, size_t new_size) TABLES_LOCK(); /* tracemalloc_add_trace() updates the trace if there is already - a trace at address (domain, ptr2) */ + a trace at address ptr2 */ if (ptr2 != ptr) { REMOVE_TRACE(ptr); } @@ -928,6 +888,7 @@ tracemalloc_clear_traces(void) TABLES_LOCK(); _Py_hashtable_clear(tracemalloc_traces); + _Py_hashtable_clear(tracemalloc_domains); tracemalloc_traced_memory = 0; tracemalloc_peak_traced_memory = 0; TABLES_UNLOCK(); @@ -983,21 +944,11 @@ tracemalloc_init(void) hashtable_hash_traceback, hashtable_compare_traceback); - if (_Py_tracemalloc_config.use_domain) { - tracemalloc_traces = hashtable_new(sizeof(pointer_t), - sizeof(trace_t), - hashtable_hash_pointer_t, - hashtable_compare_pointer_t); - } - else { - tracemalloc_traces = hashtable_new(sizeof(uintptr_t), - sizeof(trace_t), - _Py_hashtable_hash_ptr, - _Py_hashtable_compare_direct); - } + tracemalloc_traces = tracemalloc_create_traces_table(); + tracemalloc_domains = tracemalloc_create_domains_table(); if (tracemalloc_filenames == NULL || tracemalloc_tracebacks == NULL - || tracemalloc_traces == NULL) { + || tracemalloc_traces == NULL || tracemalloc_domains == NULL) { PyErr_NoMemory(); return -1; } @@ -1029,9 +980,10 @@ tracemalloc_deinit(void) tracemalloc_stop(); /* destroy hash tables */ + tracemalloc_destroy_domains(tracemalloc_domains); + _Py_hashtable_destroy(tracemalloc_traces); _Py_hashtable_destroy(tracemalloc_tracebacks); _Py_hashtable_destroy(tracemalloc_filenames); - _Py_hashtable_destroy(tracemalloc_traces); #if defined(TRACE_RAW_MALLOC) if (tables_lock != NULL) { @@ -1279,31 +1231,45 @@ trace_to_pyobject(unsigned int domain, 
trace_t *trace, typedef struct { _Py_hashtable_t *traces; + _Py_hashtable_t *domains; _Py_hashtable_t *tracebacks; PyObject *list; + unsigned int domain; } get_traces_t; +static int +tracemalloc_get_traces_copy_domain(_Py_hashtable_t *domains, + _Py_hashtable_entry_t *entry, + void *user_data) +{ + get_traces_t *get_traces = user_data; + + unsigned int domain; + _Py_HASHTABLE_ENTRY_READ_KEY(domains, entry, domain); + _Py_hashtable_t *traces; + _Py_HASHTABLE_ENTRY_READ_DATA(domains, entry, traces); + + _Py_hashtable_t *traces2 = _Py_hashtable_copy(traces); + if (_Py_HASHTABLE_SET(get_traces->domains, domain, traces2) < 0) { + _Py_hashtable_destroy(traces2); + return -1; + } + return 0; +} + + static int tracemalloc_get_traces_fill(_Py_hashtable_t *traces, _Py_hashtable_entry_t *entry, void *user_data) { get_traces_t *get_traces = user_data; - unsigned int domain; trace_t trace; PyObject *tracemalloc_obj; int res; - if (_Py_tracemalloc_config.use_domain) { - pointer_t key; - _Py_HASHTABLE_ENTRY_READ_KEY(traces, entry, key); - domain = key.domain; - } - else { - domain = DEFAULT_DOMAIN; - } _Py_HASHTABLE_ENTRY_READ_DATA(traces, entry, trace); - tracemalloc_obj = trace_to_pyobject(domain, &trace, get_traces->tracebacks); + tracemalloc_obj = trace_to_pyobject(get_traces->domain, &trace, get_traces->tracebacks); if (tracemalloc_obj == NULL) return 1; @@ -1316,6 +1282,25 @@ tracemalloc_get_traces_fill(_Py_hashtable_t *traces, _Py_hashtable_entry_t *entr } +static int +tracemalloc_get_traces_domain(_Py_hashtable_t *domains, + _Py_hashtable_entry_t *entry, + void *user_data) +{ + get_traces_t *get_traces = user_data; + + unsigned int domain; + _Py_HASHTABLE_ENTRY_READ_KEY(domains, entry, domain); + _Py_hashtable_t *traces; + _Py_HASHTABLE_ENTRY_READ_DATA(domains, entry, traces); + + get_traces->domain = domain; + return _Py_hashtable_foreach(traces, + tracemalloc_get_traces_fill, + get_traces); +} + + static int tracemalloc_pyobject_decref_cb(_Py_hashtable_t *tracebacks, 
_Py_hashtable_entry_t *entry, @@ -1345,9 +1330,9 @@ _tracemalloc__get_traces_impl(PyObject *module) /*[clinic end generated code: output=e9929876ced4b5cc input=6c7d2230b24255aa]*/ { get_traces_t get_traces; - int err; - + get_traces.domain = DEFAULT_DOMAIN; get_traces.traces = NULL; + get_traces.domains = NULL; get_traces.tracebacks = NULL; get_traces.list = PyList_New(0); if (get_traces.list == NULL) @@ -1363,28 +1348,51 @@ _tracemalloc__get_traces_impl(PyObject *module) _Py_hashtable_hash_ptr, _Py_hashtable_compare_direct); if (get_traces.tracebacks == NULL) { - PyErr_NoMemory(); - goto error; + goto no_memory; } + get_traces.domains = tracemalloc_create_domains_table(); + if (get_traces.domains == NULL) { + goto no_memory; + } + + int err; + + // Copy all traces so tracemalloc_get_traces_fill() doesn't have to disable + // temporarily tracemalloc which would impact other threads and so would + // miss allocations while get_traces() is called. TABLES_LOCK(); get_traces.traces = _Py_hashtable_copy(tracemalloc_traces); + err = _Py_hashtable_foreach(tracemalloc_domains, + tracemalloc_get_traces_copy_domain, + &get_traces); TABLES_UNLOCK(); if (get_traces.traces == NULL) { - PyErr_NoMemory(); - goto error; + goto no_memory; + } + if (err) { + goto no_memory; } + // Convert traces to a list of tuples set_reentrant(1); err = _Py_hashtable_foreach(get_traces.traces, tracemalloc_get_traces_fill, &get_traces); + if (!err) { + err = _Py_hashtable_foreach(get_traces.domains, + tracemalloc_get_traces_domain, &get_traces); + } set_reentrant(0); - if (err) + if (err) { goto error; + } goto finally; +no_memory: + PyErr_NoMemory(); + error: Py_CLEAR(get_traces.list); @@ -1397,6 +1405,9 @@ _tracemalloc__get_traces_impl(PyObject *module) if (get_traces.traces != NULL) { _Py_hashtable_destroy(get_traces.traces); } + if (get_traces.domains != NULL) { + tracemalloc_destroy_domains(get_traces.domains); + } return get_traces.list; } @@ -1412,12 +1423,12 @@ 
tracemalloc_get_traceback(unsigned int domain, uintptr_t ptr) return NULL; TABLES_LOCK(); - if (_Py_tracemalloc_config.use_domain) { - pointer_t key = {ptr, domain}; - found = _Py_HASHTABLE_GET(tracemalloc_traces, key, trace); + _Py_hashtable_t *traces = tracemalloc_get_traces_table(domain); + if (traces) { + found = _Py_HASHTABLE_GET(traces, ptr, trace); } else { - found = _Py_HASHTABLE_GET(tracemalloc_traces, ptr, trace); + found = 0; } TABLES_UNLOCK(); @@ -1564,6 +1575,19 @@ _tracemalloc_get_traceback_limit_impl(PyObject *module) } +static int +tracemalloc_get_tracemalloc_memory_cb(_Py_hashtable_t *domains, + _Py_hashtable_entry_t *entry, + void *user_data) +{ + _Py_hashtable_t *traces; + _Py_HASHTABLE_ENTRY_READ_DATA(domains, entry, traces); + + size_t *size = (size_t*)user_data; + *size += _Py_hashtable_size(traces); + return 0; +} + /*[clinic input] _tracemalloc.get_tracemalloc_memory @@ -1584,6 +1608,8 @@ _tracemalloc_get_tracemalloc_memory_impl(PyObject *module) TABLES_LOCK(); size += _Py_hashtable_size(tracemalloc_traces); + _Py_hashtable_foreach(tracemalloc_domains, + tracemalloc_get_tracemalloc_memory_cb, &size); TABLES_UNLOCK(); return PyLong_FromSize_t(size); @@ -1741,18 +1767,11 @@ _PyTraceMalloc_NewReference(PyObject *op) ptr = (uintptr_t)op; } - _Py_hashtable_entry_t* entry; int res = -1; TABLES_LOCK(); - if (_Py_tracemalloc_config.use_domain) { - pointer_t key = {ptr, DEFAULT_DOMAIN}; - entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_traces, key); - } - else { - entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_traces, ptr); - } - + _Py_hashtable_entry_t* entry; + entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_traces, ptr); if (entry != NULL) { /* update the traceback of the memory block */ traceback_t *traceback = traceback_new(); From webhook-mailer at python.org Tue May 12 20:26:09 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 13 May 2020 00:26:09 -0000 Subject: [Python-checkins] bpo-40609: Remove _Py_hashtable_t.key_size (GH-20060) 
Message-ID: https://github.com/python/cpython/commit/f9b3b582b86b9cce8d69ec7d03d716ec81c8264a commit: f9b3b582b86b9cce8d69ec7d03d716ec81c8264a branch: master author: Victor Stinner committer: GitHub date: 2020-05-13T02:26:02+02:00 summary: bpo-40609: Remove _Py_hashtable_t.key_size (GH-20060) Rewrite _Py_hashtable_t type to always store the key as a "const void *" pointer. Add an explicit "key" member to _Py_hashtable_entry_t. Remove _Py_hashtable_t.key_size member. hash and compare functions drop their hash table parameter, and their 'key' parameter type becomes "const void *". files: M Include/internal/pycore_hashtable.h M Modules/_tracemalloc.c M Python/hashtable.c M Python/marshal.c diff --git a/Include/internal/pycore_hashtable.h b/Include/internal/pycore_hashtable.h index 6e094e94376ad..965a4e7f2b458 100644 --- a/Include/internal/pycore_hashtable.h +++ b/Include/internal/pycore_hashtable.h @@ -30,32 +30,13 @@ typedef struct { _Py_slist_item_t _Py_slist_item; Py_uhash_t key_hash; - - /* key (key_size bytes) and then data (data_size bytes) follows */ + void *key; + /* data (data_size bytes) follows */ } _Py_hashtable_entry_t; -#define _Py_HASHTABLE_ENTRY_PKEY(ENTRY) \ - ((const void *)((char *)(ENTRY) \ - + sizeof(_Py_hashtable_entry_t))) - #define _Py_HASHTABLE_ENTRY_PDATA(TABLE, ENTRY) \ ((const void *)((char *)(ENTRY) \ - + sizeof(_Py_hashtable_entry_t) \ - + (TABLE)->key_size)) - -/* Get a key value from pkey: use memcpy() rather than a pointer dereference - to avoid memory alignment issues. 
*/ -#define _Py_HASHTABLE_READ_KEY(TABLE, PKEY, DST_KEY) \ - do { \ - assert(sizeof(DST_KEY) == (TABLE)->key_size); \ - memcpy(&(DST_KEY), (PKEY), sizeof(DST_KEY)); \ - } while (0) - -#define _Py_HASHTABLE_ENTRY_READ_KEY(TABLE, ENTRY, KEY) \ - do { \ - assert(sizeof(KEY) == (TABLE)->key_size); \ - memcpy(&(KEY), _Py_HASHTABLE_ENTRY_PKEY(ENTRY), sizeof(KEY)); \ - } while (0) + + sizeof(_Py_hashtable_entry_t))) #define _Py_HASHTABLE_ENTRY_READ_DATA(TABLE, ENTRY, DATA) \ do { \ @@ -78,15 +59,12 @@ typedef struct { struct _Py_hashtable_t; typedef struct _Py_hashtable_t _Py_hashtable_t; -typedef Py_uhash_t (*_Py_hashtable_hash_func) (_Py_hashtable_t *ht, - const void *pkey); -typedef int (*_Py_hashtable_compare_func) (_Py_hashtable_t *ht, - const void *pkey, - const _Py_hashtable_entry_t *he); +typedef Py_uhash_t (*_Py_hashtable_hash_func) (const void *key); +typedef int (*_Py_hashtable_compare_func) (const void *key1, const void *key2); typedef _Py_hashtable_entry_t* (*_Py_hashtable_get_entry_func)(_Py_hashtable_t *ht, - const void *pkey); + const void *key); typedef int (*_Py_hashtable_get_func) (_Py_hashtable_t *ht, - const void *pkey, void *data); + const void *key, void *data); typedef struct { /* allocate a memory block */ @@ -102,7 +80,6 @@ struct _Py_hashtable_t { size_t num_buckets; size_t entries; /* Total number of entries in the table. 
*/ _Py_slist_t *buckets; - size_t key_size; size_t data_size; _Py_hashtable_get_func get_func; @@ -113,24 +90,19 @@ struct _Py_hashtable_t { }; /* hash a pointer (void*) */ -PyAPI_FUNC(Py_uhash_t) _Py_hashtable_hash_ptr( - struct _Py_hashtable_t *ht, - const void *pkey); +PyAPI_FUNC(Py_uhash_t) _Py_hashtable_hash_ptr(const void *key); /* comparison using memcmp() */ PyAPI_FUNC(int) _Py_hashtable_compare_direct( - _Py_hashtable_t *ht, - const void *pkey, - const _Py_hashtable_entry_t *entry); + const void *key1, + const void *key2); PyAPI_FUNC(_Py_hashtable_t *) _Py_hashtable_new( - size_t key_size, size_t data_size, _Py_hashtable_hash_func hash_func, _Py_hashtable_compare_func compare_func); PyAPI_FUNC(_Py_hashtable_t *) _Py_hashtable_new_full( - size_t key_size, size_t data_size, size_t init_size, _Py_hashtable_hash_func hash_func, @@ -165,16 +137,15 @@ PyAPI_FUNC(size_t) _Py_hashtable_size(_Py_hashtable_t *ht); but use _Py_HASHTABLE_SET() and _Py_HASHTABLE_SET_NODATA() macros */ PyAPI_FUNC(int) _Py_hashtable_set( _Py_hashtable_t *ht, - size_t key_size, - const void *pkey, + const void *key, size_t data_size, const void *data); #define _Py_HASHTABLE_SET(TABLE, KEY, DATA) \ - _Py_hashtable_set(TABLE, sizeof(KEY), &(KEY), sizeof(DATA), &(DATA)) + _Py_hashtable_set(TABLE, (KEY), sizeof(DATA), &(DATA)) #define _Py_HASHTABLE_SET_NODATA(TABLE, KEY) \ - _Py_hashtable_set(TABLE, sizeof(KEY), &(KEY), 0, NULL) + _Py_hashtable_set(TABLE, (KEY), 0, NULL) /* Get an entry. 
@@ -183,14 +154,13 @@ PyAPI_FUNC(int) _Py_hashtable_set( Don't call directly this function, but use _Py_HASHTABLE_GET_ENTRY() macro */ static inline _Py_hashtable_entry_t * -_Py_hashtable_get_entry(_Py_hashtable_t *ht, size_t key_size, const void *pkey) +_Py_hashtable_get_entry(_Py_hashtable_t *ht, const void *key) { - assert(key_size == ht->key_size); - return ht->get_entry_func(ht, pkey); + return ht->get_entry_func(ht, key); } #define _Py_HASHTABLE_GET_ENTRY(TABLE, KEY) \ - _Py_hashtable_get_entry(TABLE, sizeof(KEY), &(KEY)) + _Py_hashtable_get_entry(TABLE, (const void *)(KEY)) /* Get data from an entry. Copy entry data into data and return 1 if the entry @@ -198,28 +168,26 @@ _Py_hashtable_get_entry(_Py_hashtable_t *ht, size_t key_size, const void *pkey) Don't call directly this function, but use _Py_HASHTABLE_GET() macro */ static inline int -_Py_hashtable_get(_Py_hashtable_t *ht, size_t key_size, const void *pkey, +_Py_hashtable_get(_Py_hashtable_t *ht, const void *key, size_t data_size, void *data) { - assert(key_size == ht->key_size); assert(data_size == ht->data_size); - return ht->get_func(ht, pkey, data); + return ht->get_func(ht, key, data); } #define _Py_HASHTABLE_GET(TABLE, KEY, DATA) \ - _Py_hashtable_get(TABLE, sizeof(KEY), &(KEY), sizeof(DATA), &(DATA)) + _Py_hashtable_get(TABLE, (KEY), sizeof(DATA), &(DATA)) /* Don't call directly this function, but use _Py_HASHTABLE_POP() macro */ PyAPI_FUNC(int) _Py_hashtable_pop( _Py_hashtable_t *ht, - size_t key_size, - const void *pkey, + const void *key, size_t data_size, void *data); #define _Py_HASHTABLE_POP(TABLE, KEY, DATA) \ - _Py_hashtable_pop(TABLE, sizeof(KEY), &(KEY), sizeof(DATA), &(DATA)) + _Py_hashtable_pop(TABLE, (KEY), sizeof(DATA), &(DATA)) #ifdef __cplusplus diff --git a/Modules/_tracemalloc.c b/Modules/_tracemalloc.c index 7e31abe05fb6b..050fe03bba8ec 100644 --- a/Modules/_tracemalloc.c +++ b/Modules/_tracemalloc.c @@ -23,6 +23,9 @@ static void raw_free(void *ptr); # define TRACE_DEBUG 
#endif +#define TO_PTR(key) ((const void *)(uintptr_t)key) +#define FROM_PTR(key) ((uintptr_t)key) + /* Protected by the GIL */ static struct { PyMemAllocatorEx mem; @@ -203,47 +206,42 @@ set_reentrant(int reentrant) static Py_uhash_t -hashtable_hash_pyobject(_Py_hashtable_t *ht, const void *pkey) +hashtable_hash_pyobject(const void *key) { - PyObject *obj; - - _Py_HASHTABLE_READ_KEY(ht, pkey, obj); + PyObject *obj = (PyObject *)key; return PyObject_Hash(obj); } static int -hashtable_compare_unicode(_Py_hashtable_t *ht, const void *pkey, - const _Py_hashtable_entry_t *entry) +hashtable_compare_unicode(const void *key1, const void *key2) { - PyObject *key1, *key2; - - _Py_HASHTABLE_READ_KEY(ht, pkey, key1); - _Py_HASHTABLE_ENTRY_READ_KEY(ht, entry, key2); - - if (key1 != NULL && key2 != NULL) - return (PyUnicode_Compare(key1, key2) == 0); - else - return key1 == key2; + PyObject *obj1 = (PyObject *)key1; + PyObject *obj2 = (PyObject *)key2; + if (obj1 != NULL && obj2 != NULL) { + return (PyUnicode_Compare(obj1, obj2) == 0); + } + else { + return obj1 == obj2; + } } static Py_uhash_t -hashtable_hash_uint(_Py_hashtable_t *ht, const void *pkey) +hashtable_hash_uint(const void *key_raw) { - unsigned int key; - _Py_HASHTABLE_READ_KEY(ht, pkey, key); + unsigned int key = (unsigned int)FROM_PTR(key_raw); return (Py_uhash_t)key; } static _Py_hashtable_t * -hashtable_new(size_t key_size, size_t data_size, +hashtable_new(size_t data_size, _Py_hashtable_hash_func hash_func, _Py_hashtable_compare_func compare_func) { _Py_hashtable_allocator_t hashtable_alloc = {malloc, free}; - return _Py_hashtable_new_full(key_size, data_size, 0, + return _Py_hashtable_new_full(data_size, 0, hash_func, compare_func, &hashtable_alloc); } @@ -263,39 +261,33 @@ raw_free(void *ptr) static Py_uhash_t -hashtable_hash_traceback(_Py_hashtable_t *ht, const void *pkey) +hashtable_hash_traceback(const void *key) { - traceback_t *traceback; - - _Py_HASHTABLE_READ_KEY(ht, pkey, traceback); + const 
traceback_t *traceback = (const traceback_t *)key; return traceback->hash; } static int -hashtable_compare_traceback(_Py_hashtable_t *ht, const void *pkey, - const _Py_hashtable_entry_t *entry) +hashtable_compare_traceback(const void *key1, const void *key2) { - traceback_t *traceback1, *traceback2; - const frame_t *frame1, *frame2; - int i; + const traceback_t *traceback1 = (const traceback_t *)key1; + const traceback_t *traceback2 = (const traceback_t *)key2; - _Py_HASHTABLE_READ_KEY(ht, pkey, traceback1); - _Py_HASHTABLE_ENTRY_READ_KEY(ht, entry, traceback2); - - if (traceback1->nframe != traceback2->nframe) + if (traceback1->nframe != traceback2->nframe) { return 0; - - if (traceback1->total_nframe != traceback2->total_nframe) + } + if (traceback1->total_nframe != traceback2->total_nframe) { return 0; + } - for (i=0; i < traceback1->nframe; i++) { - frame1 = &traceback1->frames[i]; - frame2 = &traceback2->frames[i]; + for (int i=0; i < traceback1->nframe; i++) { + const frame_t *frame1 = &traceback1->frames[i]; + const frame_t *frame2 = &traceback2->frames[i]; - if (frame1->lineno != frame2->lineno) + if (frame1->lineno != frame2->lineno) { return 0; - + } if (frame1->filename != frame2->filename) { assert(PyUnicode_Compare(frame1->filename, frame2->filename) != 0); return 0; @@ -349,7 +341,7 @@ tracemalloc_get_frame(PyFrameObject *pyframe, frame_t *frame) _Py_hashtable_entry_t *entry; entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_filenames, filename); if (entry != NULL) { - _Py_HASHTABLE_ENTRY_READ_KEY(tracemalloc_filenames, entry, filename); + filename = (PyObject *)entry->key; } else { /* tracemalloc_filenames is responsible to keep a reference @@ -444,7 +436,7 @@ traceback_new(void) /* intern the traceback */ entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_tracebacks, traceback); if (entry != NULL) { - _Py_HASHTABLE_ENTRY_READ_KEY(tracemalloc_tracebacks, entry, traceback); + traceback = (traceback_t *)entry->key; } else { traceback_t *copy; @@ -477,8 +469,7 @@ 
traceback_new(void) static _Py_hashtable_t* tracemalloc_create_traces_table(void) { - return hashtable_new(sizeof(uintptr_t), - sizeof(trace_t), + return hashtable_new(sizeof(trace_t), _Py_hashtable_hash_ptr, _Py_hashtable_compare_direct); } @@ -487,8 +478,7 @@ tracemalloc_create_traces_table(void) static _Py_hashtable_t* tracemalloc_create_domains_table(void) { - return hashtable_new(sizeof(unsigned int), - sizeof(_Py_hashtable_t *), + return hashtable_new(sizeof(_Py_hashtable_t *), hashtable_hash_uint, _Py_hashtable_compare_direct); } @@ -522,7 +512,7 @@ tracemalloc_get_traces_table(unsigned int domain) } else { _Py_hashtable_t *traces = NULL; - (void)_Py_HASHTABLE_GET(tracemalloc_domains, domain, traces); + (void)_Py_HASHTABLE_GET(tracemalloc_domains, TO_PTR(domain), traces); return traces; } } @@ -539,7 +529,7 @@ tracemalloc_remove_trace(unsigned int domain, uintptr_t ptr) } trace_t trace; - if (!_Py_HASHTABLE_POP(traces, ptr, trace)) { + if (!_Py_HASHTABLE_POP(traces, TO_PTR(ptr), trace)) { return; } assert(tracemalloc_traced_memory >= trace.size); @@ -568,7 +558,7 @@ tracemalloc_add_trace(unsigned int domain, uintptr_t ptr, return -1; } - if (_Py_HASHTABLE_SET(tracemalloc_domains, domain, traces) < 0) { + if (_Py_HASHTABLE_SET(tracemalloc_domains, TO_PTR(domain), traces) < 0) { _Py_hashtable_destroy(traces); return -1; } @@ -590,7 +580,7 @@ tracemalloc_add_trace(unsigned int domain, uintptr_t ptr, trace.size = size; trace.traceback = traceback; - int res = _Py_HASHTABLE_SET(traces, ptr, trace); + int res = _Py_HASHTABLE_SET(traces, TO_PTR(ptr), trace); if (res != 0) { return res; } @@ -859,9 +849,7 @@ static int tracemalloc_clear_filename(_Py_hashtable_t *ht, _Py_hashtable_entry_t *entry, void *user_data) { - PyObject *filename; - - _Py_HASHTABLE_ENTRY_READ_KEY(ht, entry, filename); + PyObject *filename = (PyObject *)entry->key; Py_DECREF(filename); return 0; } @@ -871,9 +859,7 @@ static int traceback_free_traceback(_Py_hashtable_t *ht, _Py_hashtable_entry_t 
*entry, void *user_data) { - traceback_t *traceback; - - _Py_HASHTABLE_ENTRY_READ_KEY(ht, entry, traceback); + traceback_t *traceback = (traceback_t *)entry->key; raw_free(traceback); return 0; } @@ -936,11 +922,11 @@ tracemalloc_init(void) } #endif - tracemalloc_filenames = hashtable_new(sizeof(PyObject *), 0, + tracemalloc_filenames = hashtable_new(0, hashtable_hash_pyobject, hashtable_compare_unicode); - tracemalloc_tracebacks = hashtable_new(sizeof(traceback_t *), 0, + tracemalloc_tracebacks = hashtable_new(0, hashtable_hash_traceback, hashtable_compare_traceback); @@ -1154,7 +1140,7 @@ traceback_to_pyobject(traceback_t *traceback, _Py_hashtable_t *intern_table) PyObject *frames, *frame; if (intern_table != NULL) { - if (_Py_HASHTABLE_GET(intern_table, traceback, frames)) { + if (_Py_HASHTABLE_GET(intern_table, (const void *)traceback, frames)) { Py_INCREF(frames); return frames; } @@ -1244,13 +1230,12 @@ tracemalloc_get_traces_copy_domain(_Py_hashtable_t *domains, { get_traces_t *get_traces = user_data; - unsigned int domain; - _Py_HASHTABLE_ENTRY_READ_KEY(domains, entry, domain); + unsigned int domain = (unsigned int)FROM_PTR(entry->key); _Py_hashtable_t *traces; _Py_HASHTABLE_ENTRY_READ_DATA(domains, entry, traces); _Py_hashtable_t *traces2 = _Py_hashtable_copy(traces); - if (_Py_HASHTABLE_SET(get_traces->domains, domain, traces2) < 0) { + if (_Py_HASHTABLE_SET(get_traces->domains, TO_PTR(domain), traces2) < 0) { _Py_hashtable_destroy(traces2); return -1; } @@ -1289,8 +1274,7 @@ tracemalloc_get_traces_domain(_Py_hashtable_t *domains, { get_traces_t *get_traces = user_data; - unsigned int domain; - _Py_HASHTABLE_ENTRY_READ_KEY(domains, entry, domain); + unsigned int domain = (unsigned int)FROM_PTR(entry->key); _Py_hashtable_t *traces; _Py_HASHTABLE_ENTRY_READ_DATA(domains, entry, traces); @@ -1343,8 +1327,7 @@ _tracemalloc__get_traces_impl(PyObject *module) /* the traceback hash table is used temporarily to intern traceback tuple of (filename, lineno) tuples 
*/ - get_traces.tracebacks = hashtable_new(sizeof(traceback_t *), - sizeof(PyObject *), + get_traces.tracebacks = hashtable_new(sizeof(PyObject *), _Py_hashtable_hash_ptr, _Py_hashtable_compare_direct); if (get_traces.tracebacks == NULL) { @@ -1425,7 +1408,7 @@ tracemalloc_get_traceback(unsigned int domain, uintptr_t ptr) TABLES_LOCK(); _Py_hashtable_t *traces = tracemalloc_get_traces_table(domain); if (traces) { - found = _Py_HASHTABLE_GET(traces, ptr, trace); + found = _Py_HASHTABLE_GET(traces, TO_PTR(ptr), trace); } else { found = 0; diff --git a/Python/hashtable.c b/Python/hashtable.c index 90fe34e628016..01d84398cc79f 100644 --- a/Python/hashtable.c +++ b/Python/hashtable.c @@ -59,7 +59,7 @@ #define ENTRY_NEXT(ENTRY) \ ((_Py_hashtable_entry_t *)_Py_SLIST_ITEM_NEXT(ENTRY)) #define HASHTABLE_ITEM_SIZE(HT) \ - (sizeof(_Py_hashtable_entry_t) + (HT)->key_size + (HT)->data_size) + (sizeof(_Py_hashtable_entry_t) + (HT)->data_size) #define ENTRY_READ_PDATA(TABLE, ENTRY, DATA_SIZE, PDATA) \ do { \ @@ -105,20 +105,16 @@ _Py_slist_remove(_Py_slist_t *list, _Py_slist_item_t *previous, Py_uhash_t -_Py_hashtable_hash_ptr(struct _Py_hashtable_t *ht, const void *pkey) +_Py_hashtable_hash_ptr(const void *key) { - void *key; - _Py_HASHTABLE_READ_KEY(ht, pkey, key); return (Py_uhash_t)_Py_HashPointerRaw(key); } int -_Py_hashtable_compare_direct(_Py_hashtable_t *ht, const void *pkey, - const _Py_hashtable_entry_t *entry) +_Py_hashtable_compare_direct(const void *key1, const void *key2) { - const void *pkey2 = _Py_HASHTABLE_ENTRY_PKEY(entry); - return (memcmp(pkey, pkey2, ht->key_size) == 0); + return (key1 == key2); } @@ -195,16 +191,16 @@ _Py_hashtable_print_stats(_Py_hashtable_t *ht) _Py_hashtable_entry_t * -_Py_hashtable_get_entry_generic(_Py_hashtable_t *ht, const void *pkey) +_Py_hashtable_get_entry_generic(_Py_hashtable_t *ht, const void *key) { - Py_uhash_t key_hash = ht->hash_func(ht, pkey); + Py_uhash_t key_hash = ht->hash_func(key); size_t index = key_hash & 
(ht->num_buckets - 1); _Py_hashtable_entry_t *entry = entry = TABLE_HEAD(ht, index); while (1) { if (entry == NULL) { return NULL; } - if (entry->key_hash == key_hash && ht->compare_func(ht, pkey, entry)) { + if (entry->key_hash == key_hash && ht->compare_func(key, entry->key)) { break; } entry = ENTRY_NEXT(entry); @@ -214,28 +210,27 @@ _Py_hashtable_get_entry_generic(_Py_hashtable_t *ht, const void *pkey) static int -_Py_hashtable_pop_entry(_Py_hashtable_t *ht, size_t key_size, const void *pkey, +_Py_hashtable_pop_entry(_Py_hashtable_t *ht, const void *key, void *data, size_t data_size) { - Py_uhash_t key_hash; - size_t index; - _Py_hashtable_entry_t *entry, *previous; - assert(key_size == ht->key_size); - - key_hash = ht->hash_func(ht, pkey); - index = key_hash & (ht->num_buckets - 1); + Py_uhash_t key_hash = ht->hash_func(key); + size_t index = key_hash & (ht->num_buckets - 1); - previous = NULL; - for (entry = TABLE_HEAD(ht, index); entry != NULL; entry = ENTRY_NEXT(entry)) { - if (entry->key_hash == key_hash && ht->compare_func(ht, pkey, entry)) + _Py_hashtable_entry_t *entry = TABLE_HEAD(ht, index); + _Py_hashtable_entry_t *previous = NULL; + while (1) { + if (entry == NULL) { + // not found + return 0; + } + if (entry->key_hash == key_hash && ht->compare_func(key, entry->key)) { break; + } previous = entry; + entry = ENTRY_NEXT(entry); } - if (entry == NULL) - return 0; - _Py_slist_remove(&ht->buckets[index], (_Py_slist_item_t *)previous, (_Py_slist_item_t *)entry); ht->entries--; @@ -251,26 +246,22 @@ _Py_hashtable_pop_entry(_Py_hashtable_t *ht, size_t key_size, const void *pkey, int -_Py_hashtable_set(_Py_hashtable_t *ht, size_t key_size, const void *pkey, +_Py_hashtable_set(_Py_hashtable_t *ht, const void *key, size_t data_size, const void *data) { - Py_uhash_t key_hash; - size_t index; _Py_hashtable_entry_t *entry; - assert(key_size == ht->key_size); - assert(data != NULL || data_size == 0); #ifndef NDEBUG /* Don't write the assertion on a single line 
because it is interesting to know the duplicated entry if the assertion failed. The entry can be read using a debugger. */ - entry = ht->get_entry_func(ht, pkey); + entry = ht->get_entry_func(ht, key); assert(entry == NULL); #endif - key_hash = ht->hash_func(ht, pkey); - index = key_hash & (ht->num_buckets - 1); + Py_uhash_t key_hash = ht->hash_func(key); + size_t index = key_hash & (ht->num_buckets - 1); entry = ht->alloc.malloc(HASHTABLE_ITEM_SIZE(ht)); if (entry == NULL) { @@ -279,9 +270,10 @@ _Py_hashtable_set(_Py_hashtable_t *ht, size_t key_size, const void *pkey, } entry->key_hash = key_hash; - memcpy((void *)_Py_HASHTABLE_ENTRY_PKEY(entry), pkey, ht->key_size); - if (data) + entry->key = (void *)key; + if (data) { ENTRY_WRITE_PDATA(ht, entry, data_size, data); + } _Py_slist_prepend(&ht->buckets[index], (_Py_slist_item_t*)entry); ht->entries++; @@ -293,10 +285,10 @@ _Py_hashtable_set(_Py_hashtable_t *ht, size_t key_size, const void *pkey, int -_Py_hashtable_get_generic(_Py_hashtable_t *ht, const void *pkey, void *data) +_Py_hashtable_get_generic(_Py_hashtable_t *ht, const void *key, void *data) { assert(data != NULL); - _Py_hashtable_entry_t *entry = ht->get_entry_func(ht, pkey); + _Py_hashtable_entry_t *entry = ht->get_entry_func(ht, key); if (entry != NULL) { ENTRY_READ_PDATA(ht, entry, ht->data_size, data); return 1; @@ -308,13 +300,12 @@ _Py_hashtable_get_generic(_Py_hashtable_t *ht, const void *pkey, void *data) // Specialized for: -// key_size == sizeof(void*) // hash_func == _Py_hashtable_hash_ptr // compare_func == _Py_hashtable_compare_direct _Py_hashtable_entry_t * -_Py_hashtable_get_entry_ptr(_Py_hashtable_t *ht, const void *pkey) +_Py_hashtable_get_entry_ptr(_Py_hashtable_t *ht, const void *key) { - Py_uhash_t key_hash = _Py_hashtable_hash_ptr(ht, pkey); + Py_uhash_t key_hash = _Py_hashtable_hash_ptr(key); size_t index = key_hash & (ht->num_buckets - 1); _Py_hashtable_entry_t *entry = entry = TABLE_HEAD(ht, index); while (1) { @@ -322,8 +313,7 @@ 
_Py_hashtable_get_entry_ptr(_Py_hashtable_t *ht, const void *pkey) return NULL; } if (entry->key_hash == key_hash) { - const void *pkey2 = _Py_HASHTABLE_ENTRY_PKEY(entry); - if (memcmp(pkey, pkey2, sizeof(void*)) == 0) { + if (entry->key == key) { break; } } @@ -334,14 +324,13 @@ _Py_hashtable_get_entry_ptr(_Py_hashtable_t *ht, const void *pkey) // Specialized for: -// key_size == sizeof(void*) // hash_func == _Py_hashtable_hash_ptr // compare_func == _Py_hashtable_compare_direct int -_Py_hashtable_get_ptr(_Py_hashtable_t *ht, const void *pkey, void *data) +_Py_hashtable_get_ptr(_Py_hashtable_t *ht, const void *key, void *data) { assert(data != NULL); - _Py_hashtable_entry_t *entry = _Py_hashtable_get_entry_ptr(ht, pkey); + _Py_hashtable_entry_t *entry = _Py_hashtable_get_entry_ptr(ht, key); if (entry != NULL) { ENTRY_READ_PDATA(ht, entry, ht->data_size, data); return 1; @@ -353,24 +342,24 @@ _Py_hashtable_get_ptr(_Py_hashtable_t *ht, const void *pkey, void *data) int -_Py_hashtable_pop(_Py_hashtable_t *ht, size_t key_size, const void *pkey, +_Py_hashtable_pop(_Py_hashtable_t *ht, const void *key, size_t data_size, void *data) { assert(data != NULL); - return _Py_hashtable_pop_entry(ht, key_size, pkey, data, data_size); + return _Py_hashtable_pop_entry(ht, key, data, data_size); } /* Code commented since the function is not needed in Python */ #if 0 void -_Py_hashtable_delete(_Py_hashtable_t *ht, size_t key_size, const void *pkey) +_Py_hashtable_delete(_Py_hashtable_t *ht, size_t const void *key) { #ifndef NDEBUG - int found = _Py_hashtable_pop_entry(ht, key_size, pkey, NULL, 0); + int found = _Py_hashtable_pop_entry(ht, key, NULL, 0); assert(found); #else - (void)_Py_hashtable_pop_entry(ht, key_size, pkey, NULL, 0); + (void)_Py_hashtable_pop_entry(ht, key, NULL, 0); #endif } #endif @@ -427,7 +416,7 @@ hashtable_rehash(_Py_hashtable_t *ht) size_t entry_index; - assert(ht->hash_func(ht, _Py_HASHTABLE_ENTRY_PKEY(entry)) == entry->key_hash); + 
assert(ht->hash_func(entry->key) == entry->key_hash); next = ENTRY_NEXT(entry); entry_index = entry->key_hash & (new_size - 1); @@ -440,8 +429,7 @@ hashtable_rehash(_Py_hashtable_t *ht) _Py_hashtable_t * -_Py_hashtable_new_full(size_t key_size, size_t data_size, - size_t init_size, +_Py_hashtable_new_full(size_t data_size, size_t init_size, _Py_hashtable_hash_func hash_func, _Py_hashtable_compare_func compare_func, _Py_hashtable_allocator_t *allocator) @@ -464,7 +452,6 @@ _Py_hashtable_new_full(size_t key_size, size_t data_size, ht->num_buckets = round_size(init_size); ht->entries = 0; - ht->key_size = key_size; ht->data_size = data_size; buckets_size = ht->num_buckets * sizeof(ht->buckets[0]); @@ -480,8 +467,7 @@ _Py_hashtable_new_full(size_t key_size, size_t data_size, ht->hash_func = hash_func; ht->compare_func = compare_func; ht->alloc = alloc; - if (ht->key_size == sizeof(void*) - && ht->hash_func == _Py_hashtable_hash_ptr + if (ht->hash_func == _Py_hashtable_hash_ptr && ht->compare_func == _Py_hashtable_compare_direct) { ht->get_func = _Py_hashtable_get_ptr; @@ -492,12 +478,11 @@ _Py_hashtable_new_full(size_t key_size, size_t data_size, _Py_hashtable_t * -_Py_hashtable_new(size_t key_size, size_t data_size, +_Py_hashtable_new(size_t data_size, _Py_hashtable_hash_func hash_func, _Py_hashtable_compare_func compare_func) { - return _Py_hashtable_new_full(key_size, data_size, - HASHTABLE_MIN_SIZE, + return _Py_hashtable_new_full(data_size, HASHTABLE_MIN_SIZE, hash_func, compare_func, NULL); } @@ -543,15 +528,13 @@ _Py_hashtable_destroy(_Py_hashtable_t *ht) _Py_hashtable_t * _Py_hashtable_copy(_Py_hashtable_t *src) { - const size_t key_size = src->key_size; const size_t data_size = src->data_size; _Py_hashtable_t *dst; _Py_hashtable_entry_t *entry; size_t bucket; int err; - dst = _Py_hashtable_new_full(key_size, data_size, - src->num_buckets, + dst = _Py_hashtable_new_full(data_size, src->num_buckets, src->hash_func, src->compare_func, &src->alloc); @@ -561,9 
+544,9 @@ _Py_hashtable_copy(_Py_hashtable_t *src) for (bucket=0; bucket < src->num_buckets; bucket++) { entry = TABLE_HEAD(src, bucket); for (; entry; entry = ENTRY_NEXT(entry)) { - const void *pkey = _Py_HASHTABLE_ENTRY_PKEY(entry); + const void *key = entry->key; const void *pdata = _Py_HASHTABLE_ENTRY_PDATA(src, entry); - err = _Py_hashtable_set(dst, key_size, pkey, data_size, pdata); + err = _Py_hashtable_set(dst, key, data_size, pdata); if (err) { _Py_hashtable_destroy(dst); return NULL; diff --git a/Python/marshal.c b/Python/marshal.c index d2bff524f30dd..1e901ae7c3133 100644 --- a/Python/marshal.c +++ b/Python/marshal.c @@ -549,7 +549,7 @@ static int w_init_refs(WFILE *wf, int version) { if (version >= 3) { - wf->hashtable = _Py_hashtable_new(sizeof(PyObject *), sizeof(int), + wf->hashtable = _Py_hashtable_new(sizeof(int), _Py_hashtable_hash_ptr, _Py_hashtable_compare_direct); if (wf->hashtable == NULL) { @@ -564,9 +564,7 @@ static int w_decref_entry(_Py_hashtable_t *ht, _Py_hashtable_entry_t *entry, void *Py_UNUSED(data)) { - PyObject *entry_key; - - _Py_HASHTABLE_ENTRY_READ_KEY(ht, entry, entry_key); + PyObject *entry_key = (PyObject *)entry->key; Py_XDECREF(entry_key); return 0; } From webhook-mailer at python.org Tue May 12 20:50:26 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 13 May 2020 00:50:26 -0000 Subject: [Python-checkins] bpo-40609: Add destroy functions to _Py_hashtable (GH-20062) Message-ID: https://github.com/python/cpython/commit/2d0a3d682f699cce8db6e30981d41d9125318726 commit: 2d0a3d682f699cce8db6e30981d41d9125318726 branch: master author: Victor Stinner committer: GitHub date: 2020-05-13T02:50:18+02:00 summary: bpo-40609: Add destroy functions to _Py_hashtable (GH-20062) Add key_destroy_func and value_destroy_func parameters to _Py_hashtable_new_full(). marshal.c and _tracemalloc.c use these destroy functions. 
files: M Include/internal/pycore_hashtable.h M Modules/_tracemalloc.c M Python/hashtable.c M Python/marshal.c diff --git a/Include/internal/pycore_hashtable.h b/Include/internal/pycore_hashtable.h index 965a4e7f2b458..3c7483a058f71 100644 --- a/Include/internal/pycore_hashtable.h +++ b/Include/internal/pycore_hashtable.h @@ -34,21 +34,21 @@ typedef struct { /* data (data_size bytes) follows */ } _Py_hashtable_entry_t; -#define _Py_HASHTABLE_ENTRY_PDATA(TABLE, ENTRY) \ +#define _Py_HASHTABLE_ENTRY_PDATA(ENTRY) \ ((const void *)((char *)(ENTRY) \ + sizeof(_Py_hashtable_entry_t))) #define _Py_HASHTABLE_ENTRY_READ_DATA(TABLE, ENTRY, DATA) \ do { \ assert(sizeof(DATA) == (TABLE)->data_size); \ - memcpy(&(DATA), _Py_HASHTABLE_ENTRY_PDATA(TABLE, (ENTRY)), \ + memcpy(&(DATA), _Py_HASHTABLE_ENTRY_PDATA((ENTRY)), \ sizeof(DATA)); \ } while (0) #define _Py_HASHTABLE_ENTRY_WRITE_DATA(TABLE, ENTRY, DATA) \ do { \ assert(sizeof(DATA) == (TABLE)->data_size); \ - memcpy((void *)_Py_HASHTABLE_ENTRY_PDATA((TABLE), (ENTRY)), \ + memcpy((void *)_Py_HASHTABLE_ENTRY_PDATA(ENTRY), \ &(DATA), sizeof(DATA)); \ } while (0) @@ -61,6 +61,9 @@ typedef struct _Py_hashtable_t _Py_hashtable_t; typedef Py_uhash_t (*_Py_hashtable_hash_func) (const void *key); typedef int (*_Py_hashtable_compare_func) (const void *key1, const void *key2); +typedef void (*_Py_hashtable_destroy_func) (void *key); +typedef void (*_Py_hashtable_value_destroy_func) (_Py_hashtable_t *ht, + _Py_hashtable_entry_t *entry); typedef _Py_hashtable_entry_t* (*_Py_hashtable_get_entry_func)(_Py_hashtable_t *ht, const void *key); typedef int (*_Py_hashtable_get_func) (_Py_hashtable_t *ht, @@ -86,6 +89,8 @@ struct _Py_hashtable_t { _Py_hashtable_get_entry_func get_entry_func; _Py_hashtable_hash_func hash_func; _Py_hashtable_compare_func compare_func; + _Py_hashtable_destroy_func key_destroy_func; + _Py_hashtable_value_destroy_func value_destroy_func; _Py_hashtable_allocator_t alloc; }; @@ -107,6 +112,8 @@ PyAPI_FUNC(_Py_hashtable_t 
*) _Py_hashtable_new_full( size_t init_size, _Py_hashtable_hash_func hash_func, _Py_hashtable_compare_func compare_func, + _Py_hashtable_destroy_func key_destroy_func, + _Py_hashtable_value_destroy_func value_destroy_func, _Py_hashtable_allocator_t *allocator); PyAPI_FUNC(void) _Py_hashtable_destroy(_Py_hashtable_t *ht); diff --git a/Modules/_tracemalloc.c b/Modules/_tracemalloc.c index 050fe03bba8ec..618bf476d99ad 100644 --- a/Modules/_tracemalloc.c +++ b/Modules/_tracemalloc.c @@ -238,12 +238,13 @@ hashtable_hash_uint(const void *key_raw) static _Py_hashtable_t * hashtable_new(size_t data_size, _Py_hashtable_hash_func hash_func, - _Py_hashtable_compare_func compare_func) + _Py_hashtable_compare_func compare_func, + _Py_hashtable_value_destroy_func value_destroy_fun) { _Py_hashtable_allocator_t hashtable_alloc = {malloc, free}; return _Py_hashtable_new_full(data_size, 0, hash_func, compare_func, - &hashtable_alloc); + NULL, value_destroy_fun, &hashtable_alloc); } @@ -471,35 +472,34 @@ tracemalloc_create_traces_table(void) { return hashtable_new(sizeof(trace_t), _Py_hashtable_hash_ptr, - _Py_hashtable_compare_direct); + _Py_hashtable_compare_direct, + NULL); } -static _Py_hashtable_t* -tracemalloc_create_domains_table(void) +static void +tracemalloc_destroy_domain_table(_Py_hashtable_t *domains, + _Py_hashtable_entry_t *entry) { - return hashtable_new(sizeof(_Py_hashtable_t *), - hashtable_hash_uint, - _Py_hashtable_compare_direct); + _Py_hashtable_t *traces; + _Py_HASHTABLE_ENTRY_READ_DATA(domains, entry, traces); + _Py_hashtable_destroy(traces); } -static int -tracemalloc_destroy_domains_cb(_Py_hashtable_t *domains, - _Py_hashtable_entry_t *entry, - void *user_data) +static _Py_hashtable_t* +tracemalloc_create_domains_table(void) { - _Py_hashtable_t *traces; - _Py_HASHTABLE_ENTRY_READ_DATA(domains, entry, traces); - _Py_hashtable_destroy(traces); - return 0; + return hashtable_new(sizeof(_Py_hashtable_t *), + hashtable_hash_uint, + _Py_hashtable_compare_direct, + 
tracemalloc_destroy_domain_table); } static void tracemalloc_destroy_domains(_Py_hashtable_t *domains) { - _Py_hashtable_foreach(domains, tracemalloc_destroy_domains_cb, NULL); _Py_hashtable_destroy(domains); } @@ -924,11 +924,13 @@ tracemalloc_init(void) tracemalloc_filenames = hashtable_new(0, hashtable_hash_pyobject, - hashtable_compare_unicode); + hashtable_compare_unicode, + NULL); tracemalloc_tracebacks = hashtable_new(0, hashtable_hash_traceback, - hashtable_compare_traceback); + hashtable_compare_traceback, + NULL); tracemalloc_traces = tracemalloc_create_traces_table(); tracemalloc_domains = tracemalloc_create_domains_table(); @@ -1285,15 +1287,13 @@ tracemalloc_get_traces_domain(_Py_hashtable_t *domains, } -static int +static void tracemalloc_pyobject_decref_cb(_Py_hashtable_t *tracebacks, - _Py_hashtable_entry_t *entry, - void *user_data) + _Py_hashtable_entry_t *entry) { PyObject *obj; _Py_HASHTABLE_ENTRY_READ_DATA(tracebacks, entry, obj); Py_DECREF(obj); - return 0; } @@ -1329,7 +1329,8 @@ _tracemalloc__get_traces_impl(PyObject *module) of (filename, lineno) tuples */ get_traces.tracebacks = hashtable_new(sizeof(PyObject *), _Py_hashtable_hash_ptr, - _Py_hashtable_compare_direct); + _Py_hashtable_compare_direct, + tracemalloc_pyobject_decref_cb); if (get_traces.tracebacks == NULL) { goto no_memory; } @@ -1381,8 +1382,6 @@ _tracemalloc__get_traces_impl(PyObject *module) finally: if (get_traces.tracebacks != NULL) { - _Py_hashtable_foreach(get_traces.tracebacks, - tracemalloc_pyobject_decref_cb, NULL); _Py_hashtable_destroy(get_traces.tracebacks); } if (get_traces.traces != NULL) { diff --git a/Python/hashtable.c b/Python/hashtable.c index 01d84398cc79f..0c013bbccf557 100644 --- a/Python/hashtable.c +++ b/Python/hashtable.c @@ -64,14 +64,14 @@ #define ENTRY_READ_PDATA(TABLE, ENTRY, DATA_SIZE, PDATA) \ do { \ assert((DATA_SIZE) == (TABLE)->data_size); \ - memcpy((PDATA), _Py_HASHTABLE_ENTRY_PDATA(TABLE, (ENTRY)), \ + memcpy((PDATA), 
_Py_HASHTABLE_ENTRY_PDATA(ENTRY), \ (DATA_SIZE)); \ } while (0) #define ENTRY_WRITE_PDATA(TABLE, ENTRY, DATA_SIZE, PDATA) \ do { \ assert((DATA_SIZE) == (TABLE)->data_size); \ - memcpy((void *)_Py_HASHTABLE_ENTRY_PDATA((TABLE), (ENTRY)), \ + memcpy((void *)_Py_HASHTABLE_ENTRY_PDATA(ENTRY), \ (PDATA), (DATA_SIZE)); \ } while (0) @@ -432,6 +432,8 @@ _Py_hashtable_t * _Py_hashtable_new_full(size_t data_size, size_t init_size, _Py_hashtable_hash_func hash_func, _Py_hashtable_compare_func compare_func, + _Py_hashtable_destroy_func key_destroy_func, + _Py_hashtable_value_destroy_func value_destroy_func, _Py_hashtable_allocator_t *allocator) { _Py_hashtable_t *ht; @@ -466,6 +468,8 @@ _Py_hashtable_new_full(size_t data_size, size_t init_size, ht->get_entry_func = _Py_hashtable_get_entry_generic; ht->hash_func = hash_func; ht->compare_func = compare_func; + ht->key_destroy_func = key_destroy_func; + ht->value_destroy_func = value_destroy_func; ht->alloc = alloc; if (ht->hash_func == _Py_hashtable_hash_ptr && ht->compare_func == _Py_hashtable_compare_direct) @@ -484,7 +488,7 @@ _Py_hashtable_new(size_t data_size, { return _Py_hashtable_new_full(data_size, HASHTABLE_MIN_SIZE, hash_func, compare_func, - NULL); + NULL, NULL, NULL); } @@ -506,16 +510,27 @@ _Py_hashtable_clear(_Py_hashtable_t *ht) } +static void +_Py_hashtable_destroy_entry(_Py_hashtable_t *ht, _Py_hashtable_entry_t *entry) +{ + if (ht->key_destroy_func) { + ht->key_destroy_func(entry->key); + } + if (ht->value_destroy_func) { + ht->value_destroy_func(ht, entry); + } + ht->alloc.free(entry); +} + + void _Py_hashtable_destroy(_Py_hashtable_t *ht) { - size_t i; - - for (i = 0; i < ht->num_buckets; i++) { - _Py_slist_item_t *entry = ht->buckets[i].head; + for (size_t i = 0; i < ht->num_buckets; i++) { + _Py_hashtable_entry_t *entry = TABLE_HEAD(ht, i); while (entry) { - _Py_slist_item_t *entry_next = entry->next; - ht->alloc.free(entry); + _Py_hashtable_entry_t *entry_next = ENTRY_NEXT(entry); + 
_Py_hashtable_destroy_entry(ht, entry); entry = entry_next; } } @@ -537,6 +552,8 @@ _Py_hashtable_copy(_Py_hashtable_t *src) dst = _Py_hashtable_new_full(data_size, src->num_buckets, src->hash_func, src->compare_func, + src->key_destroy_func, + src->value_destroy_func, &src->alloc); if (dst == NULL) return NULL; @@ -545,7 +562,7 @@ _Py_hashtable_copy(_Py_hashtable_t *src) entry = TABLE_HEAD(src, bucket); for (; entry; entry = ENTRY_NEXT(entry)) { const void *key = entry->key; - const void *pdata = _Py_HASHTABLE_ENTRY_PDATA(src, entry); + const void *pdata = _Py_HASHTABLE_ENTRY_PDATA(entry); err = _Py_hashtable_set(dst, key, data_size, pdata); if (err) { _Py_hashtable_destroy(dst); diff --git a/Python/marshal.c b/Python/marshal.c index 1e901ae7c3133..7c99c1ee13c0e 100644 --- a/Python/marshal.c +++ b/Python/marshal.c @@ -545,13 +545,21 @@ w_complex_object(PyObject *v, char flag, WFILE *p) } } +static void +w_decref_entry(void *key) +{ + PyObject *entry_key = (PyObject *)key; + Py_XDECREF(entry_key); +} + static int w_init_refs(WFILE *wf, int version) { if (version >= 3) { - wf->hashtable = _Py_hashtable_new(sizeof(int), - _Py_hashtable_hash_ptr, - _Py_hashtable_compare_direct); + wf->hashtable = _Py_hashtable_new_full(sizeof(int), 0, + _Py_hashtable_hash_ptr, + _Py_hashtable_compare_direct, + w_decref_entry, NULL, NULL); if (wf->hashtable == NULL) { PyErr_NoMemory(); return -1; @@ -560,20 +568,10 @@ w_init_refs(WFILE *wf, int version) return 0; } -static int -w_decref_entry(_Py_hashtable_t *ht, _Py_hashtable_entry_t *entry, - void *Py_UNUSED(data)) -{ - PyObject *entry_key = (PyObject *)entry->key; - Py_XDECREF(entry_key); - return 0; -} - static void w_clear_refs(WFILE *wf) { if (wf->hashtable != NULL) { - _Py_hashtable_foreach(wf->hashtable, w_decref_entry, NULL); _Py_hashtable_destroy(wf->hashtable); } } From webhook-mailer at python.org Tue May 12 21:52:18 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 13 May 2020 01:52:18 -0000 Subject: 
[Python-checkins] bpo-40609: _tracemalloc allocates traces (GH-20064) Message-ID: https://github.com/python/cpython/commit/d95bd4214c2babe851b02562d973d60c02e639b7 commit: d95bd4214c2babe851b02562d973d60c02e639b7 branch: master author: Victor Stinner committer: GitHub date: 2020-05-13T03:52:11+02:00 summary: bpo-40609: _tracemalloc allocates traces (GH-20064) Rewrite _tracemalloc to store "trace_t*" rather than directly "trace_t" in traces hash tables. Traces are now allocated on the heap memory, outside the hash table. Add tracemalloc_copy_traces() and tracemalloc_copy_domains() helper functions. Remove _Py_hashtable_copy() function since there is no API to copy a key or a value. Remove also _Py_hashtable_delete() function which was commented. files: M Include/internal/pycore_hashtable.h M Modules/_tracemalloc.c M Python/hashtable.c diff --git a/Include/internal/pycore_hashtable.h b/Include/internal/pycore_hashtable.h index 3c7483a058f71..0da2ffdb389e5 100644 --- a/Include/internal/pycore_hashtable.h +++ b/Include/internal/pycore_hashtable.h @@ -45,13 +45,6 @@ typedef struct { sizeof(DATA)); \ } while (0) -#define _Py_HASHTABLE_ENTRY_WRITE_DATA(TABLE, ENTRY, DATA) \ - do { \ - assert(sizeof(DATA) == (TABLE)->data_size); \ - memcpy((void *)_Py_HASHTABLE_ENTRY_PDATA(ENTRY), \ - &(DATA), sizeof(DATA)); \ - } while (0) - /* _Py_hashtable: prototypes */ @@ -118,9 +111,6 @@ PyAPI_FUNC(_Py_hashtable_t *) _Py_hashtable_new_full( PyAPI_FUNC(void) _Py_hashtable_destroy(_Py_hashtable_t *ht); -/* Return a copy of the hash table */ -PyAPI_FUNC(_Py_hashtable_t *) _Py_hashtable_copy(_Py_hashtable_t *src); - PyAPI_FUNC(void) _Py_hashtable_clear(_Py_hashtable_t *ht); typedef int (*_Py_hashtable_foreach_func) (_Py_hashtable_t *ht, diff --git a/Modules/_tracemalloc.c b/Modules/_tracemalloc.c index 618bf476d99ad..a42349a8e4719 100644 --- a/Modules/_tracemalloc.c +++ b/Modules/_tracemalloc.c @@ -122,7 +122,7 @@ static traceback_t *tracemalloc_traceback = NULL; Protected by the GIL */ 
static _Py_hashtable_t *tracemalloc_tracebacks = NULL; -/* pointer (void*) => trace (trace_t). +/* pointer (void*) => trace (trace_t*). Protected by TABLES_LOCK(). */ static _Py_hashtable_t *tracemalloc_traces = NULL; @@ -467,13 +467,23 @@ traceback_new(void) } +static void +tracemalloc_destroy_trace_cb(_Py_hashtable_t *traces, + _Py_hashtable_entry_t *entry) +{ + trace_t *trace; + _Py_HASHTABLE_ENTRY_READ_DATA(traces, entry, trace); + raw_free(trace); +} + + static _Py_hashtable_t* tracemalloc_create_traces_table(void) { - return hashtable_new(sizeof(trace_t), + return hashtable_new(sizeof(trace_t*), _Py_hashtable_hash_ptr, _Py_hashtable_compare_direct, - NULL); + tracemalloc_destroy_trace_cb); } @@ -528,12 +538,13 @@ tracemalloc_remove_trace(unsigned int domain, uintptr_t ptr) return; } - trace_t trace; + trace_t *trace; if (!_Py_HASHTABLE_POP(traces, TO_PTR(ptr), trace)) { return; } - assert(tracemalloc_traced_memory >= trace.size); - tracemalloc_traced_memory -= trace.size; + assert(tracemalloc_traced_memory >= trace->size); + tracemalloc_traced_memory -= trace->size; + raw_free(trace); } #define REMOVE_TRACE(ptr) \ @@ -565,23 +576,27 @@ tracemalloc_add_trace(unsigned int domain, uintptr_t ptr, } _Py_hashtable_entry_t* entry = _Py_HASHTABLE_GET_ENTRY(traces, ptr); - trace_t trace; if (entry != NULL) { /* the memory block is already tracked */ + trace_t *trace; _Py_HASHTABLE_ENTRY_READ_DATA(traces, entry, trace); - assert(tracemalloc_traced_memory >= trace.size); - tracemalloc_traced_memory -= trace.size; + assert(tracemalloc_traced_memory >= trace->size); + tracemalloc_traced_memory -= trace->size; - trace.size = size; - trace.traceback = traceback; - _Py_HASHTABLE_ENTRY_WRITE_DATA(traces, entry, trace); + trace->size = size; + trace->traceback = traceback; } else { - trace.size = size; - trace.traceback = traceback; + trace_t *trace = raw_malloc(sizeof(trace_t)); + if (trace == NULL) { + return -1; + } + trace->size = size; + trace->traceback = traceback; int 
res = _Py_HASHTABLE_SET(traces, TO_PTR(ptr), trace); if (res != 0) { + raw_free(trace); return res; } } @@ -1225,19 +1240,62 @@ typedef struct { unsigned int domain; } get_traces_t; + static int -tracemalloc_get_traces_copy_domain(_Py_hashtable_t *domains, - _Py_hashtable_entry_t *entry, - void *user_data) +tracemalloc_copy_trace(_Py_hashtable_t *traces, + _Py_hashtable_entry_t *entry, + void *traces2_raw) { - get_traces_t *get_traces = user_data; + _Py_hashtable_t *traces2 = (_Py_hashtable_t *)traces2_raw; + + trace_t *trace; + _Py_HASHTABLE_ENTRY_READ_DATA(traces, entry, trace); + + trace_t *trace2 = raw_malloc(sizeof(trace_t)); + if (traces2 == NULL) { + return -1; + } + *trace2 = *trace; + if (_Py_HASHTABLE_SET(traces2, entry->key, trace2) < 0) { + raw_free(trace2); + return -1; + } + return 0; +} + + +static _Py_hashtable_t* +tracemalloc_copy_traces(_Py_hashtable_t *traces) +{ + _Py_hashtable_t *traces2 = tracemalloc_create_traces_table(); + if (traces2 == NULL) { + return NULL; + } + + int err = _Py_hashtable_foreach(traces, + tracemalloc_copy_trace, + traces2); + if (err) { + _Py_hashtable_destroy(traces2); + return NULL; + } + return traces2; +} + + +static int +tracemalloc_copy_domain(_Py_hashtable_t *domains, + _Py_hashtable_entry_t *entry, + void *domains2_raw) +{ + _Py_hashtable_t *domains2 = (_Py_hashtable_t *)domains2_raw; unsigned int domain = (unsigned int)FROM_PTR(entry->key); _Py_hashtable_t *traces; _Py_HASHTABLE_ENTRY_READ_DATA(domains, entry, traces); - _Py_hashtable_t *traces2 = _Py_hashtable_copy(traces); - if (_Py_HASHTABLE_SET(get_traces->domains, TO_PTR(domain), traces2) < 0) { + _Py_hashtable_t *traces2 = tracemalloc_copy_traces(traces); + if (_Py_HASHTABLE_SET(domains2, TO_PTR(domain), traces2) < 0) { _Py_hashtable_destroy(traces2); return -1; } @@ -1245,18 +1303,37 @@ tracemalloc_get_traces_copy_domain(_Py_hashtable_t *domains, } +static _Py_hashtable_t* +tracemalloc_copy_domains(_Py_hashtable_t *domains) +{ + _Py_hashtable_t *domains2 
= tracemalloc_create_domains_table(); + if (domains2 == NULL) { + return NULL; + } + + int err = _Py_hashtable_foreach(domains, + tracemalloc_copy_domain, + domains2); + if (err) { + _Py_hashtable_destroy(domains2); + return NULL; + } + return domains2; +} + + static int tracemalloc_get_traces_fill(_Py_hashtable_t *traces, _Py_hashtable_entry_t *entry, void *user_data) { get_traces_t *get_traces = user_data; - trace_t trace; + trace_t *trace; PyObject *tracemalloc_obj; int res; _Py_HASHTABLE_ENTRY_READ_DATA(traces, entry, trace); - tracemalloc_obj = trace_to_pyobject(get_traces->domain, &trace, get_traces->tracebacks); + tracemalloc_obj = trace_to_pyobject(get_traces->domain, trace, get_traces->tracebacks); if (tracemalloc_obj == NULL) return 1; @@ -1335,37 +1412,34 @@ _tracemalloc__get_traces_impl(PyObject *module) goto no_memory; } - get_traces.domains = tracemalloc_create_domains_table(); - if (get_traces.domains == NULL) { - goto no_memory; - } - - int err; - // Copy all traces so tracemalloc_get_traces_fill() doesn't have to disable // temporarily tracemalloc which would impact other threads and so would // miss allocations while get_traces() is called. 
TABLES_LOCK(); - get_traces.traces = _Py_hashtable_copy(tracemalloc_traces); - err = _Py_hashtable_foreach(tracemalloc_domains, - tracemalloc_get_traces_copy_domain, - &get_traces); + get_traces.traces = tracemalloc_copy_traces(tracemalloc_traces); TABLES_UNLOCK(); if (get_traces.traces == NULL) { goto no_memory; } - if (err) { + + TABLES_LOCK(); + get_traces.domains = tracemalloc_copy_domains(tracemalloc_domains); + TABLES_UNLOCK(); + + if (get_traces.domains == NULL) { goto no_memory; } // Convert traces to a list of tuples set_reentrant(1); - err = _Py_hashtable_foreach(get_traces.traces, - tracemalloc_get_traces_fill, &get_traces); + int err = _Py_hashtable_foreach(get_traces.traces, + tracemalloc_get_traces_fill, + &get_traces); if (!err) { err = _Py_hashtable_foreach(get_traces.domains, - tracemalloc_get_traces_domain, &get_traces); + tracemalloc_get_traces_domain, + &get_traces); } set_reentrant(0); if (err) { @@ -1398,7 +1472,7 @@ _tracemalloc__get_traces_impl(PyObject *module) static traceback_t* tracemalloc_get_traceback(unsigned int domain, uintptr_t ptr) { - trace_t trace; + trace_t *trace; int found; if (!_Py_tracemalloc_config.tracing) @@ -1414,10 +1488,11 @@ tracemalloc_get_traceback(unsigned int domain, uintptr_t ptr) } TABLES_UNLOCK(); - if (!found) + if (!found) { return NULL; + } - return trace.traceback; + return trace->traceback; } @@ -1758,10 +1833,9 @@ _PyTraceMalloc_NewReference(PyObject *op) /* update the traceback of the memory block */ traceback_t *traceback = traceback_new(); if (traceback != NULL) { - trace_t trace; + trace_t *trace; _Py_HASHTABLE_ENTRY_READ_DATA(tracemalloc_traces, entry, trace); - trace.traceback = traceback; - _Py_HASHTABLE_ENTRY_WRITE_DATA(tracemalloc_traces, entry, trace); + trace->traceback = traceback; res = 0; } } diff --git a/Python/hashtable.c b/Python/hashtable.c index 0c013bbccf557..e7681fb156519 100644 --- a/Python/hashtable.c +++ b/Python/hashtable.c @@ -350,21 +350,6 @@ _Py_hashtable_pop(_Py_hashtable_t 
*ht, const void *key, } -/* Code commented since the function is not needed in Python */ -#if 0 -void -_Py_hashtable_delete(_Py_hashtable_t *ht, size_t const void *key) -{ -#ifndef NDEBUG - int found = _Py_hashtable_pop_entry(ht, key, NULL, 0); - assert(found); -#else - (void)_Py_hashtable_pop_entry(ht, key, NULL, 0); -#endif -} -#endif - - int _Py_hashtable_foreach(_Py_hashtable_t *ht, _Py_hashtable_foreach_func func, @@ -538,37 +523,3 @@ _Py_hashtable_destroy(_Py_hashtable_t *ht) ht->alloc.free(ht->buckets); ht->alloc.free(ht); } - - -_Py_hashtable_t * -_Py_hashtable_copy(_Py_hashtable_t *src) -{ - const size_t data_size = src->data_size; - _Py_hashtable_t *dst; - _Py_hashtable_entry_t *entry; - size_t bucket; - int err; - - dst = _Py_hashtable_new_full(data_size, src->num_buckets, - src->hash_func, - src->compare_func, - src->key_destroy_func, - src->value_destroy_func, - &src->alloc); - if (dst == NULL) - return NULL; - - for (bucket=0; bucket < src->num_buckets; bucket++) { - entry = TABLE_HEAD(src, bucket); - for (; entry; entry = ENTRY_NEXT(entry)) { - const void *key = entry->key; - const void *pdata = _Py_HASHTABLE_ENTRY_PDATA(entry); - err = _Py_hashtable_set(dst, key, data_size, pdata); - if (err) { - _Py_hashtable_destroy(dst); - return NULL; - } - } - } - return dst; -} From webhook-mailer at python.org Tue May 12 22:40:40 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 13 May 2020 02:40:40 -0000 Subject: [Python-checkins] bpo-40609: _Py_hashtable_t values become void* (GH-20065) Message-ID: https://github.com/python/cpython/commit/5b0a30354d8a8bb39a05ce10ca4f5c78b729f25b commit: 5b0a30354d8a8bb39a05ce10ca4f5c78b729f25b branch: master author: Victor Stinner committer: GitHub date: 2020-05-13T04:40:30+02:00 summary: bpo-40609: _Py_hashtable_t values become void* (GH-20065) _Py_hashtable_t values become regular "void *" pointers. 
* Add _Py_hashtable_entry_t.data member * Remove _Py_hashtable_t.data_size member * Remove _Py_hashtable_t.get_func member. It is no longer needed to specialize _Py_hashtable_get() for a specific value size, since all entries now have the same size (void*). * Remove the following macros: * _Py_HASHTABLE_GET() * _Py_HASHTABLE_SET() * _Py_HASHTABLE_SET_NODATA() * _Py_HASHTABLE_POP() * Rename _Py_hashtable_pop() to _Py_hashtable_steal() * _Py_hashtable_foreach() callback now gets key and value rather than entry. * Remove _Py_hashtable_value_destroy_func type. value_destroy_func callback now only has a single parameter: data (void*). files: M Include/internal/pycore_hashtable.h M Modules/_tracemalloc.c M Python/hashtable.c M Python/marshal.c diff --git a/Include/internal/pycore_hashtable.h b/Include/internal/pycore_hashtable.h index 0da2ffdb389e5..2990f9e0c1cc6 100644 --- a/Include/internal/pycore_hashtable.h +++ b/Include/internal/pycore_hashtable.h @@ -31,20 +31,9 @@ typedef struct { Py_uhash_t key_hash; void *key; - /* data (data_size bytes) follows */ + void *value; } _Py_hashtable_entry_t; -#define _Py_HASHTABLE_ENTRY_PDATA(ENTRY) \ - ((const void *)((char *)(ENTRY) \ - + sizeof(_Py_hashtable_entry_t))) - -#define _Py_HASHTABLE_ENTRY_READ_DATA(TABLE, ENTRY, DATA) \ - do { \ - assert(sizeof(DATA) == (TABLE)->data_size); \ - memcpy(&(DATA), _Py_HASHTABLE_ENTRY_PDATA((ENTRY)), \ - sizeof(DATA)); \ - } while (0) - /* _Py_hashtable: prototypes */ @@ -55,12 +44,8 @@ typedef struct _Py_hashtable_t _Py_hashtable_t; typedef Py_uhash_t (*_Py_hashtable_hash_func) (const void *key); typedef int (*_Py_hashtable_compare_func) (const void *key1, const void *key2); typedef void (*_Py_hashtable_destroy_func) (void *key); -typedef void (*_Py_hashtable_value_destroy_func) (_Py_hashtable_t *ht, - _Py_hashtable_entry_t *entry); typedef _Py_hashtable_entry_t* (*_Py_hashtable_get_entry_func)(_Py_hashtable_t *ht, const void *key); -typedef int (*_Py_hashtable_get_func) (_Py_hashtable_t 
*ht, - const void *key, void *data); typedef struct { /* allocate a memory block */ @@ -76,14 +61,12 @@ struct _Py_hashtable_t { size_t num_buckets; size_t entries; /* Total number of entries in the table. */ _Py_slist_t *buckets; - size_t data_size; - _Py_hashtable_get_func get_func; _Py_hashtable_get_entry_func get_entry_func; _Py_hashtable_hash_func hash_func; _Py_hashtable_compare_func compare_func; _Py_hashtable_destroy_func key_destroy_func; - _Py_hashtable_value_destroy_func value_destroy_func; + _Py_hashtable_destroy_func value_destroy_func; _Py_hashtable_allocator_t alloc; }; @@ -96,17 +79,14 @@ PyAPI_FUNC(int) _Py_hashtable_compare_direct( const void *key2); PyAPI_FUNC(_Py_hashtable_t *) _Py_hashtable_new( - size_t data_size, _Py_hashtable_hash_func hash_func, _Py_hashtable_compare_func compare_func); PyAPI_FUNC(_Py_hashtable_t *) _Py_hashtable_new_full( - size_t data_size, - size_t init_size, _Py_hashtable_hash_func hash_func, _Py_hashtable_compare_func compare_func, _Py_hashtable_destroy_func key_destroy_func, - _Py_hashtable_value_destroy_func value_destroy_func, + _Py_hashtable_destroy_func value_destroy_func, _Py_hashtable_allocator_t *allocator); PyAPI_FUNC(void) _Py_hashtable_destroy(_Py_hashtable_t *ht); @@ -114,8 +94,8 @@ PyAPI_FUNC(void) _Py_hashtable_destroy(_Py_hashtable_t *ht); PyAPI_FUNC(void) _Py_hashtable_clear(_Py_hashtable_t *ht); typedef int (*_Py_hashtable_foreach_func) (_Py_hashtable_t *ht, - _Py_hashtable_entry_t *entry, - void *arg); + const void *key, const void *value, + void *user_data); /* Call func() on each entry of the hashtable. 
Iteration stops if func() result is non-zero, in this case it's the result @@ -123,68 +103,42 @@ typedef int (*_Py_hashtable_foreach_func) (_Py_hashtable_t *ht, PyAPI_FUNC(int) _Py_hashtable_foreach( _Py_hashtable_t *ht, _Py_hashtable_foreach_func func, - void *arg); + void *user_data); -PyAPI_FUNC(size_t) _Py_hashtable_size(_Py_hashtable_t *ht); +PyAPI_FUNC(size_t) _Py_hashtable_size(const _Py_hashtable_t *ht); /* Add a new entry to the hash. The key must not be present in the hash table. - Return 0 on success, -1 on memory error. - - Don't call directly this function, - but use _Py_HASHTABLE_SET() and _Py_HASHTABLE_SET_NODATA() macros */ + Return 0 on success, -1 on memory error. */ PyAPI_FUNC(int) _Py_hashtable_set( _Py_hashtable_t *ht, const void *key, - size_t data_size, - const void *data); - -#define _Py_HASHTABLE_SET(TABLE, KEY, DATA) \ - _Py_hashtable_set(TABLE, (KEY), sizeof(DATA), &(DATA)) - -#define _Py_HASHTABLE_SET_NODATA(TABLE, KEY) \ - _Py_hashtable_set(TABLE, (KEY), 0, NULL) + void *value); /* Get an entry. - Return NULL if the key does not exist. - - Don't call directly this function, but use _Py_HASHTABLE_GET_ENTRY() - macro */ + Return NULL if the key does not exist. */ static inline _Py_hashtable_entry_t * _Py_hashtable_get_entry(_Py_hashtable_t *ht, const void *key) { return ht->get_entry_func(ht, key); } -#define _Py_HASHTABLE_GET_ENTRY(TABLE, KEY) \ - _Py_hashtable_get_entry(TABLE, (const void *)(KEY)) +/* Get value from an entry. + Return NULL if the entry is not found. -/* Get data from an entry. Copy entry data into data and return 1 if the entry - exists, return 0 if the entry does not exist. + Use _Py_hashtable_get_entry() to distinguish entry value equal to NULL + and entry not found. 
*/ +extern void *_Py_hashtable_get(_Py_hashtable_t *ht, const void *key); - Don't call directly this function, but use _Py_HASHTABLE_GET() macro */ -static inline int -_Py_hashtable_get(_Py_hashtable_t *ht, const void *key, - size_t data_size, void *data) -{ - assert(data_size == ht->data_size); - return ht->get_func(ht, key, data); -} - -#define _Py_HASHTABLE_GET(TABLE, KEY, DATA) \ - _Py_hashtable_get(TABLE, (KEY), sizeof(DATA), &(DATA)) - -/* Don't call directly this function, but use _Py_HASHTABLE_POP() macro */ -PyAPI_FUNC(int) _Py_hashtable_pop( +// Remove a key and its associated value without calling key and value destroy +// functions. +// Return the removed value if the key was found. +// Return NULL if the key was not found. +PyAPI_FUNC(void*) _Py_hashtable_steal( _Py_hashtable_t *ht, - const void *key, - size_t data_size, - void *data); - -#define _Py_HASHTABLE_POP(TABLE, KEY, DATA) \ - _Py_hashtable_pop(TABLE, (KEY), sizeof(DATA), &(DATA)) + const void *key); #ifdef __cplusplus diff --git a/Modules/_tracemalloc.c b/Modules/_tracemalloc.c index a42349a8e4719..4522d1afde908 100644 --- a/Modules/_tracemalloc.c +++ b/Modules/_tracemalloc.c @@ -23,8 +23,8 @@ static void raw_free(void *ptr); # define TRACE_DEBUG #endif -#define TO_PTR(key) ((const void *)(uintptr_t)key) -#define FROM_PTR(key) ((uintptr_t)key) +#define TO_PTR(key) ((const void *)(uintptr_t)(key)) +#define FROM_PTR(key) ((uintptr_t)(key)) /* Protected by the GIL */ static struct { @@ -236,15 +236,15 @@ hashtable_hash_uint(const void *key_raw) static _Py_hashtable_t * -hashtable_new(size_t data_size, - _Py_hashtable_hash_func hash_func, +hashtable_new(_Py_hashtable_hash_func hash_func, _Py_hashtable_compare_func compare_func, - _Py_hashtable_value_destroy_func value_destroy_fun) + _Py_hashtable_destroy_func key_destroy_func, + _Py_hashtable_destroy_func value_destroy_func) { _Py_hashtable_allocator_t hashtable_alloc = {malloc, free}; - return _Py_hashtable_new_full(data_size, 0, - hash_func, 
compare_func, - NULL, value_destroy_fun, &hashtable_alloc); + return _Py_hashtable_new_full(hash_func, compare_func, + key_destroy_func, value_destroy_func, + &hashtable_alloc); } @@ -340,7 +340,7 @@ tracemalloc_get_frame(PyFrameObject *pyframe, frame_t *frame) /* intern the filename */ _Py_hashtable_entry_t *entry; - entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_filenames, filename); + entry = _Py_hashtable_get_entry(tracemalloc_filenames, filename); if (entry != NULL) { filename = (PyObject *)entry->key; } @@ -348,7 +348,7 @@ tracemalloc_get_frame(PyFrameObject *pyframe, frame_t *frame) /* tracemalloc_filenames is responsible to keep a reference to the filename */ Py_INCREF(filename); - if (_Py_HASHTABLE_SET_NODATA(tracemalloc_filenames, filename) < 0) { + if (_Py_hashtable_set(tracemalloc_filenames, filename, NULL) < 0) { Py_DECREF(filename); #ifdef TRACE_DEBUG tracemalloc_error("failed to intern the filename"); @@ -435,7 +435,7 @@ traceback_new(void) traceback->hash = traceback_hash(traceback); /* intern the traceback */ - entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_tracebacks, traceback); + entry = _Py_hashtable_get_entry(tracemalloc_tracebacks, traceback); if (entry != NULL) { traceback = (traceback_t *)entry->key; } @@ -454,7 +454,7 @@ traceback_new(void) } memcpy(copy, traceback, traceback_size); - if (_Py_HASHTABLE_SET_NODATA(tracemalloc_tracebacks, copy) < 0) { + if (_Py_hashtable_set(tracemalloc_tracebacks, copy, NULL) < 0) { raw_free(copy); #ifdef TRACE_DEBUG tracemalloc_error("failed to intern the traceback: putdata failed"); @@ -467,50 +467,22 @@ traceback_new(void) } -static void -tracemalloc_destroy_trace_cb(_Py_hashtable_t *traces, - _Py_hashtable_entry_t *entry) -{ - trace_t *trace; - _Py_HASHTABLE_ENTRY_READ_DATA(traces, entry, trace); - raw_free(trace); -} - - static _Py_hashtable_t* tracemalloc_create_traces_table(void) { - return hashtable_new(sizeof(trace_t*), - _Py_hashtable_hash_ptr, + return hashtable_new(_Py_hashtable_hash_ptr, 
_Py_hashtable_compare_direct, - tracemalloc_destroy_trace_cb); -} - - -static void -tracemalloc_destroy_domain_table(_Py_hashtable_t *domains, - _Py_hashtable_entry_t *entry) -{ - _Py_hashtable_t *traces; - _Py_HASHTABLE_ENTRY_READ_DATA(domains, entry, traces); - _Py_hashtable_destroy(traces); + NULL, raw_free); } static _Py_hashtable_t* tracemalloc_create_domains_table(void) { - return hashtable_new(sizeof(_Py_hashtable_t *), - hashtable_hash_uint, + return hashtable_new(hashtable_hash_uint, _Py_hashtable_compare_direct, - tracemalloc_destroy_domain_table); -} - - -static void -tracemalloc_destroy_domains(_Py_hashtable_t *domains) -{ - _Py_hashtable_destroy(domains); + NULL, + (_Py_hashtable_destroy_func)_Py_hashtable_destroy); } @@ -521,9 +493,7 @@ tracemalloc_get_traces_table(unsigned int domain) return tracemalloc_traces; } else { - _Py_hashtable_t *traces = NULL; - (void)_Py_HASHTABLE_GET(tracemalloc_domains, TO_PTR(domain), traces); - return traces; + return _Py_hashtable_get(tracemalloc_domains, TO_PTR(domain)); } } @@ -538,8 +508,8 @@ tracemalloc_remove_trace(unsigned int domain, uintptr_t ptr) return; } - trace_t *trace; - if (!_Py_HASHTABLE_POP(traces, TO_PTR(ptr), trace)) { + trace_t *trace = _Py_hashtable_steal(traces, TO_PTR(ptr)); + if (!trace) { return; } assert(tracemalloc_traced_memory >= trace->size); @@ -569,17 +539,15 @@ tracemalloc_add_trace(unsigned int domain, uintptr_t ptr, return -1; } - if (_Py_HASHTABLE_SET(tracemalloc_domains, TO_PTR(domain), traces) < 0) { + if (_Py_hashtable_set(tracemalloc_domains, TO_PTR(domain), traces) < 0) { _Py_hashtable_destroy(traces); return -1; } } - _Py_hashtable_entry_t* entry = _Py_HASHTABLE_GET_ENTRY(traces, ptr); - if (entry != NULL) { + trace_t *trace = _Py_hashtable_get(traces, TO_PTR(ptr)); + if (trace != NULL) { /* the memory block is already tracked */ - trace_t *trace; - _Py_HASHTABLE_ENTRY_READ_DATA(traces, entry, trace); assert(tracemalloc_traced_memory >= trace->size); tracemalloc_traced_memory 
-= trace->size; @@ -587,14 +555,14 @@ tracemalloc_add_trace(unsigned int domain, uintptr_t ptr, trace->traceback = traceback; } else { - trace_t *trace = raw_malloc(sizeof(trace_t)); + trace = raw_malloc(sizeof(trace_t)); if (trace == NULL) { return -1; } trace->size = size; trace->traceback = traceback; - int res = _Py_HASHTABLE_SET(traces, TO_PTR(ptr), trace); + int res = _Py_hashtable_set(traces, TO_PTR(ptr), trace); if (res != 0) { raw_free(trace); return res; @@ -860,23 +828,11 @@ tracemalloc_raw_realloc(void *ctx, void *ptr, size_t new_size) #endif /* TRACE_RAW_MALLOC */ -static int -tracemalloc_clear_filename(_Py_hashtable_t *ht, _Py_hashtable_entry_t *entry, - void *user_data) +static void +tracemalloc_clear_filename(void *value) { - PyObject *filename = (PyObject *)entry->key; + PyObject *filename = (PyObject *)value; Py_DECREF(filename); - return 0; -} - - -static int -traceback_free_traceback(_Py_hashtable_t *ht, _Py_hashtable_entry_t *entry, - void *user_data) -{ - traceback_t *traceback = (traceback_t *)entry->key; - raw_free(traceback); - return 0; } @@ -894,10 +850,8 @@ tracemalloc_clear_traces(void) tracemalloc_peak_traced_memory = 0; TABLES_UNLOCK(); - _Py_hashtable_foreach(tracemalloc_tracebacks, traceback_free_traceback, NULL); _Py_hashtable_clear(tracemalloc_tracebacks); - _Py_hashtable_foreach(tracemalloc_filenames, tracemalloc_clear_filename, NULL); _Py_hashtable_clear(tracemalloc_filenames); } @@ -937,15 +891,13 @@ tracemalloc_init(void) } #endif - tracemalloc_filenames = hashtable_new(0, - hashtable_hash_pyobject, + tracemalloc_filenames = hashtable_new(hashtable_hash_pyobject, hashtable_compare_unicode, - NULL); + tracemalloc_clear_filename, NULL); - tracemalloc_tracebacks = hashtable_new(0, - hashtable_hash_traceback, + tracemalloc_tracebacks = hashtable_new(hashtable_hash_traceback, hashtable_compare_traceback, - NULL); + NULL, raw_free); tracemalloc_traces = tracemalloc_create_traces_table(); tracemalloc_domains = 
tracemalloc_create_domains_table(); @@ -983,7 +935,7 @@ tracemalloc_deinit(void) tracemalloc_stop(); /* destroy hash tables */ - tracemalloc_destroy_domains(tracemalloc_domains); + _Py_hashtable_destroy(tracemalloc_domains); _Py_hashtable_destroy(tracemalloc_traces); _Py_hashtable_destroy(tracemalloc_tracebacks); _Py_hashtable_destroy(tracemalloc_filenames); @@ -1153,11 +1105,11 @@ frame_to_pyobject(frame_t *frame) static PyObject* traceback_to_pyobject(traceback_t *traceback, _Py_hashtable_t *intern_table) { - int i; - PyObject *frames, *frame; + PyObject *frames; if (intern_table != NULL) { - if (_Py_HASHTABLE_GET(intern_table, (const void *)traceback, frames)) { + frames = _Py_hashtable_get(intern_table, (const void *)traceback); + if (frames) { Py_INCREF(frames); return frames; } @@ -1167,8 +1119,8 @@ traceback_to_pyobject(traceback_t *traceback, _Py_hashtable_t *intern_table) if (frames == NULL) return NULL; - for (i=0; i < traceback->nframe; i++) { - frame = frame_to_pyobject(&traceback->frames[i]); + for (int i=0; i < traceback->nframe; i++) { + PyObject *frame = frame_to_pyobject(&traceback->frames[i]); if (frame == NULL) { Py_DECREF(frames); return NULL; @@ -1177,7 +1129,7 @@ traceback_to_pyobject(traceback_t *traceback, _Py_hashtable_t *intern_table) } if (intern_table != NULL) { - if (_Py_HASHTABLE_SET(intern_table, traceback, frames) < 0) { + if (_Py_hashtable_set(intern_table, traceback, frames) < 0) { Py_DECREF(frames); PyErr_NoMemory(); return NULL; @@ -1190,7 +1142,7 @@ traceback_to_pyobject(traceback_t *traceback, _Py_hashtable_t *intern_table) static PyObject* -trace_to_pyobject(unsigned int domain, trace_t *trace, +trace_to_pyobject(unsigned int domain, const trace_t *trace, _Py_hashtable_t *intern_tracebacks) { PyObject *trace_obj = NULL; @@ -1243,20 +1195,19 @@ typedef struct { static int tracemalloc_copy_trace(_Py_hashtable_t *traces, - _Py_hashtable_entry_t *entry, - void *traces2_raw) + const void *key, const void *value, + void *user_data) 
{ - _Py_hashtable_t *traces2 = (_Py_hashtable_t *)traces2_raw; + _Py_hashtable_t *traces2 = (_Py_hashtable_t *)user_data; - trace_t *trace; - _Py_HASHTABLE_ENTRY_READ_DATA(traces, entry, trace); + trace_t *trace = (trace_t *)value; trace_t *trace2 = raw_malloc(sizeof(trace_t)); if (traces2 == NULL) { return -1; } *trace2 = *trace; - if (_Py_HASHTABLE_SET(traces2, entry->key, trace2) < 0) { + if (_Py_hashtable_set(traces2, key, trace2) < 0) { raw_free(trace2); return -1; } @@ -1285,17 +1236,16 @@ tracemalloc_copy_traces(_Py_hashtable_t *traces) static int tracemalloc_copy_domain(_Py_hashtable_t *domains, - _Py_hashtable_entry_t *entry, - void *domains2_raw) + const void *key, const void *value, + void *user_data) { - _Py_hashtable_t *domains2 = (_Py_hashtable_t *)domains2_raw; + _Py_hashtable_t *domains2 = (_Py_hashtable_t *)user_data; - unsigned int domain = (unsigned int)FROM_PTR(entry->key); - _Py_hashtable_t *traces; - _Py_HASHTABLE_ENTRY_READ_DATA(domains, entry, traces); + unsigned int domain = (unsigned int)FROM_PTR(key); + _Py_hashtable_t *traces = (_Py_hashtable_t *)value; _Py_hashtable_t *traces2 = tracemalloc_copy_traces(traces); - if (_Py_HASHTABLE_SET(domains2, TO_PTR(domain), traces2) < 0) { + if (_Py_hashtable_set(domains2, TO_PTR(domain), traces2) < 0) { _Py_hashtable_destroy(traces2); return -1; } @@ -1323,24 +1273,25 @@ tracemalloc_copy_domains(_Py_hashtable_t *domains) static int -tracemalloc_get_traces_fill(_Py_hashtable_t *traces, _Py_hashtable_entry_t *entry, +tracemalloc_get_traces_fill(_Py_hashtable_t *traces, + const void *key, const void *value, void *user_data) { get_traces_t *get_traces = user_data; - trace_t *trace; - PyObject *tracemalloc_obj; - int res; - _Py_HASHTABLE_ENTRY_READ_DATA(traces, entry, trace); + const trace_t *trace = (const trace_t *)value; - tracemalloc_obj = trace_to_pyobject(get_traces->domain, trace, get_traces->tracebacks); - if (tracemalloc_obj == NULL) + PyObject *tuple = trace_to_pyobject(get_traces->domain, 
trace, + get_traces->tracebacks); + if (tuple == NULL) { return 1; + } - res = PyList_Append(get_traces->list, tracemalloc_obj); - Py_DECREF(tracemalloc_obj); - if (res < 0) + int res = PyList_Append(get_traces->list, tuple); + Py_DECREF(tuple); + if (res < 0) { return 1; + } return 0; } @@ -1348,14 +1299,13 @@ tracemalloc_get_traces_fill(_Py_hashtable_t *traces, _Py_hashtable_entry_t *entr static int tracemalloc_get_traces_domain(_Py_hashtable_t *domains, - _Py_hashtable_entry_t *entry, + const void *key, const void *value, void *user_data) { get_traces_t *get_traces = user_data; - unsigned int domain = (unsigned int)FROM_PTR(entry->key); - _Py_hashtable_t *traces; - _Py_HASHTABLE_ENTRY_READ_DATA(domains, entry, traces); + unsigned int domain = (unsigned int)FROM_PTR(key); + _Py_hashtable_t *traces = (_Py_hashtable_t *)value; get_traces->domain = domain; return _Py_hashtable_foreach(traces, @@ -1365,11 +1315,9 @@ tracemalloc_get_traces_domain(_Py_hashtable_t *domains, static void -tracemalloc_pyobject_decref_cb(_Py_hashtable_t *tracebacks, - _Py_hashtable_entry_t *entry) +tracemalloc_pyobject_decref(void *value) { - PyObject *obj; - _Py_HASHTABLE_ENTRY_READ_DATA(tracebacks, entry, obj); + PyObject *obj = (PyObject *)value; Py_DECREF(obj); } @@ -1404,10 +1352,9 @@ _tracemalloc__get_traces_impl(PyObject *module) /* the traceback hash table is used temporarily to intern traceback tuple of (filename, lineno) tuples */ - get_traces.tracebacks = hashtable_new(sizeof(PyObject *), - _Py_hashtable_hash_ptr, + get_traces.tracebacks = hashtable_new(_Py_hashtable_hash_ptr, _Py_hashtable_compare_direct, - tracemalloc_pyobject_decref_cb); + NULL, tracemalloc_pyobject_decref); if (get_traces.tracebacks == NULL) { goto no_memory; } @@ -1462,7 +1409,7 @@ _tracemalloc__get_traces_impl(PyObject *module) _Py_hashtable_destroy(get_traces.traces); } if (get_traces.domains != NULL) { - tracemalloc_destroy_domains(get_traces.domains); + _Py_hashtable_destroy(get_traces.domains); } return 
get_traces.list; @@ -1472,23 +1419,22 @@ _tracemalloc__get_traces_impl(PyObject *module) static traceback_t* tracemalloc_get_traceback(unsigned int domain, uintptr_t ptr) { - trace_t *trace; - int found; if (!_Py_tracemalloc_config.tracing) return NULL; + trace_t *trace; TABLES_LOCK(); _Py_hashtable_t *traces = tracemalloc_get_traces_table(domain); if (traces) { - found = _Py_HASHTABLE_GET(traces, TO_PTR(ptr), trace); + trace = _Py_hashtable_get(traces, TO_PTR(ptr)); } else { - found = 0; + trace = NULL; } TABLES_UNLOCK(); - if (!found) { + if (!trace) { return NULL; } @@ -1634,12 +1580,10 @@ _tracemalloc_get_traceback_limit_impl(PyObject *module) static int tracemalloc_get_tracemalloc_memory_cb(_Py_hashtable_t *domains, - _Py_hashtable_entry_t *entry, + const void *key, const void *value, void *user_data) { - _Py_hashtable_t *traces; - _Py_HASHTABLE_ENTRY_READ_DATA(domains, entry, traces); - + const _Py_hashtable_t *traces = value; size_t *size = (size_t*)user_data; *size += _Py_hashtable_size(traces); return 0; @@ -1827,14 +1771,11 @@ _PyTraceMalloc_NewReference(PyObject *op) int res = -1; TABLES_LOCK(); - _Py_hashtable_entry_t* entry; - entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_traces, ptr); - if (entry != NULL) { + trace_t *trace = _Py_hashtable_get(tracemalloc_traces, TO_PTR(ptr)); + if (trace != NULL) { /* update the traceback of the memory block */ traceback_t *traceback = traceback_new(); if (traceback != NULL) { - trace_t *trace; - _Py_HASHTABLE_ENTRY_READ_DATA(tracemalloc_traces, entry, trace); trace->traceback = traceback; res = 0; } diff --git a/Python/hashtable.c b/Python/hashtable.c index e7681fb156519..dc4af3395181c 100644 --- a/Python/hashtable.c +++ b/Python/hashtable.c @@ -58,22 +58,6 @@ ((_Py_hashtable_entry_t *)_Py_SLIST_HEAD(&(HT)->buckets[BUCKET])) #define ENTRY_NEXT(ENTRY) \ ((_Py_hashtable_entry_t *)_Py_SLIST_ITEM_NEXT(ENTRY)) -#define HASHTABLE_ITEM_SIZE(HT) \ - (sizeof(_Py_hashtable_entry_t) + (HT)->data_size) - -#define 
ENTRY_READ_PDATA(TABLE, ENTRY, DATA_SIZE, PDATA) \ - do { \ - assert((DATA_SIZE) == (TABLE)->data_size); \ - memcpy((PDATA), _Py_HASHTABLE_ENTRY_PDATA(ENTRY), \ - (DATA_SIZE)); \ - } while (0) - -#define ENTRY_WRITE_PDATA(TABLE, ENTRY, DATA_SIZE, PDATA) \ - do { \ - assert((DATA_SIZE) == (TABLE)->data_size); \ - memcpy((void *)_Py_HASHTABLE_ENTRY_PDATA(ENTRY), \ - (PDATA), (DATA_SIZE)); \ - } while (0) /* Forward declaration */ static void hashtable_rehash(_Py_hashtable_t *ht); @@ -133,7 +117,7 @@ round_size(size_t s) size_t -_Py_hashtable_size(_Py_hashtable_t *ht) +_Py_hashtable_size(const _Py_hashtable_t *ht) { size_t size; @@ -143,7 +127,7 @@ _Py_hashtable_size(_Py_hashtable_t *ht) size += ht->num_buckets * sizeof(_Py_hashtable_entry_t *); /* entries */ - size += ht->entries * HASHTABLE_ITEM_SIZE(ht); + size += ht->entries * sizeof(_Py_hashtable_entry_t); return size; } @@ -209,11 +193,9 @@ _Py_hashtable_get_entry_generic(_Py_hashtable_t *ht, const void *key) } -static int -_Py_hashtable_pop_entry(_Py_hashtable_t *ht, const void *key, - void *data, size_t data_size) +void* +_Py_hashtable_steal(_Py_hashtable_t *ht, const void *key) { - Py_uhash_t key_hash = ht->hash_func(key); size_t index = key_hash & (ht->num_buckets - 1); @@ -222,7 +204,7 @@ _Py_hashtable_pop_entry(_Py_hashtable_t *ht, const void *key, while (1) { if (entry == NULL) { // not found - return 0; + return NULL; } if (entry->key_hash == key_hash && ht->compare_func(key, entry->key)) { break; @@ -235,23 +217,21 @@ _Py_hashtable_pop_entry(_Py_hashtable_t *ht, const void *key, (_Py_slist_item_t *)entry); ht->entries--; - if (data != NULL) - ENTRY_READ_PDATA(ht, entry, data_size, data); + void *value = entry->value; ht->alloc.free(entry); - if ((float)ht->entries / (float)ht->num_buckets < HASHTABLE_LOW) + if ((float)ht->entries / (float)ht->num_buckets < HASHTABLE_LOW) { hashtable_rehash(ht); - return 1; + } + return value; } int -_Py_hashtable_set(_Py_hashtable_t *ht, const void *key, - size_t 
data_size, const void *data) +_Py_hashtable_set(_Py_hashtable_t *ht, const void *key, void *value) { _Py_hashtable_entry_t *entry; - assert(data != NULL || data_size == 0); #ifndef NDEBUG /* Don't write the assertion on a single line because it is interesting to know the duplicated entry if the assertion failed. The entry can @@ -263,7 +243,7 @@ _Py_hashtable_set(_Py_hashtable_t *ht, const void *key, Py_uhash_t key_hash = ht->hash_func(key); size_t index = key_hash & (ht->num_buckets - 1); - entry = ht->alloc.malloc(HASHTABLE_ITEM_SIZE(ht)); + entry = ht->alloc.malloc(sizeof(_Py_hashtable_entry_t)); if (entry == NULL) { /* memory allocation failed */ return -1; @@ -271,9 +251,7 @@ _Py_hashtable_set(_Py_hashtable_t *ht, const void *key, entry->key_hash = key_hash; entry->key = (void *)key; - if (data) { - ENTRY_WRITE_PDATA(ht, entry, data_size, data); - } + entry->value = value; _Py_slist_prepend(&ht->buckets[index], (_Py_slist_item_t*)entry); ht->entries++; @@ -284,17 +262,15 @@ _Py_hashtable_set(_Py_hashtable_t *ht, const void *key, } -int -_Py_hashtable_get_generic(_Py_hashtable_t *ht, const void *key, void *data) +void* +_Py_hashtable_get(_Py_hashtable_t *ht, const void *key) { - assert(data != NULL); _Py_hashtable_entry_t *entry = ht->get_entry_func(ht, key); if (entry != NULL) { - ENTRY_READ_PDATA(ht, entry, ht->data_size, data); - return 1; + return entry->value; } else { - return 0; + return NULL; } } @@ -323,44 +299,17 @@ _Py_hashtable_get_entry_ptr(_Py_hashtable_t *ht, const void *key) } -// Specialized for: -// hash_func == _Py_hashtable_hash_ptr -// compare_func == _Py_hashtable_compare_direct -int -_Py_hashtable_get_ptr(_Py_hashtable_t *ht, const void *key, void *data) -{ - assert(data != NULL); - _Py_hashtable_entry_t *entry = _Py_hashtable_get_entry_ptr(ht, key); - if (entry != NULL) { - ENTRY_READ_PDATA(ht, entry, ht->data_size, data); - return 1; - } - else { - return 0; - } -} - - -int -_Py_hashtable_pop(_Py_hashtable_t *ht, const void *key, - 
size_t data_size, void *data) -{ - assert(data != NULL); - return _Py_hashtable_pop_entry(ht, key, data, data_size); -} - - int _Py_hashtable_foreach(_Py_hashtable_t *ht, _Py_hashtable_foreach_func func, - void *arg) + void *user_data) { _Py_hashtable_entry_t *entry; size_t hv; for (hv = 0; hv < ht->num_buckets; hv++) { for (entry = TABLE_HEAD(ht, hv); entry; entry = ENTRY_NEXT(entry)) { - int res = func(ht, entry, arg); + int res = func(ht, entry->key, entry->value, user_data); if (res) return res; } @@ -414,11 +363,10 @@ hashtable_rehash(_Py_hashtable_t *ht) _Py_hashtable_t * -_Py_hashtable_new_full(size_t data_size, size_t init_size, - _Py_hashtable_hash_func hash_func, +_Py_hashtable_new_full(_Py_hashtable_hash_func hash_func, _Py_hashtable_compare_func compare_func, _Py_hashtable_destroy_func key_destroy_func, - _Py_hashtable_value_destroy_func value_destroy_func, + _Py_hashtable_destroy_func value_destroy_func, _Py_hashtable_allocator_t *allocator) { _Py_hashtable_t *ht; @@ -437,9 +385,8 @@ _Py_hashtable_new_full(size_t data_size, size_t init_size, if (ht == NULL) return ht; - ht->num_buckets = round_size(init_size); + ht->num_buckets = HASHTABLE_MIN_SIZE; ht->entries = 0; - ht->data_size = data_size; buckets_size = ht->num_buckets * sizeof(ht->buckets[0]); ht->buckets = alloc.malloc(buckets_size); @@ -449,7 +396,6 @@ _Py_hashtable_new_full(size_t data_size, size_t init_size, } memset(ht->buckets, 0, buckets_size); - ht->get_func = _Py_hashtable_get_generic; ht->get_entry_func = _Py_hashtable_get_entry_generic; ht->hash_func = hash_func; ht->compare_func = compare_func; @@ -459,7 +405,6 @@ _Py_hashtable_new_full(size_t data_size, size_t init_size, if (ht->hash_func == _Py_hashtable_hash_ptr && ht->compare_func == _Py_hashtable_compare_direct) { - ht->get_func = _Py_hashtable_get_ptr; ht->get_entry_func = _Py_hashtable_get_entry_ptr; } return ht; @@ -467,16 +412,27 @@ _Py_hashtable_new_full(size_t data_size, size_t init_size, _Py_hashtable_t * 
-_Py_hashtable_new(size_t data_size, - _Py_hashtable_hash_func hash_func, +_Py_hashtable_new(_Py_hashtable_hash_func hash_func, _Py_hashtable_compare_func compare_func) { - return _Py_hashtable_new_full(data_size, HASHTABLE_MIN_SIZE, - hash_func, compare_func, + return _Py_hashtable_new_full(hash_func, compare_func, NULL, NULL, NULL); } +static void +_Py_hashtable_destroy_entry(_Py_hashtable_t *ht, _Py_hashtable_entry_t *entry) +{ + if (ht->key_destroy_func) { + ht->key_destroy_func(entry->key); + } + if (ht->value_destroy_func) { + ht->value_destroy_func(entry->value); + } + ht->alloc.free(entry); +} + + void _Py_hashtable_clear(_Py_hashtable_t *ht) { @@ -486,7 +442,7 @@ _Py_hashtable_clear(_Py_hashtable_t *ht) for (i=0; i < ht->num_buckets; i++) { for (entry = TABLE_HEAD(ht, i); entry != NULL; entry = next) { next = ENTRY_NEXT(entry); - ht->alloc.free(entry); + _Py_hashtable_destroy_entry(ht, entry); } _Py_slist_init(&ht->buckets[i]); } @@ -495,19 +451,6 @@ _Py_hashtable_clear(_Py_hashtable_t *ht) } -static void -_Py_hashtable_destroy_entry(_Py_hashtable_t *ht, _Py_hashtable_entry_t *entry) -{ - if (ht->key_destroy_func) { - ht->key_destroy_func(entry->key); - } - if (ht->value_destroy_func) { - ht->value_destroy_func(ht, entry); - } - ht->alloc.free(entry); -} - - void _Py_hashtable_destroy(_Py_hashtable_t *ht) { diff --git a/Python/marshal.c b/Python/marshal.c index 7c99c1ee13c0e..b096ff8932220 100644 --- a/Python/marshal.c +++ b/Python/marshal.c @@ -302,10 +302,10 @@ w_ref(PyObject *v, char *flag, WFILE *p) if (Py_REFCNT(v) == 1) return 0; - entry = _Py_HASHTABLE_GET_ENTRY(p->hashtable, v); + entry = _Py_hashtable_get_entry(p->hashtable, v); if (entry != NULL) { /* write the reference index to the stream */ - _Py_HASHTABLE_ENTRY_READ_DATA(p->hashtable, entry, w); + w = (int)(uintptr_t)entry->value; /* we don't store "long" indices in the dict */ assert(0 <= w && w <= 0x7fffffff); w_byte(TYPE_REF, p); @@ -320,7 +320,7 @@ w_ref(PyObject *v, char *flag, WFILE *p) 
} w = (int)s; Py_INCREF(v); - if (_Py_HASHTABLE_SET(p->hashtable, v, w) < 0) { + if (_Py_hashtable_set(p->hashtable, v, (void *)(uintptr_t)w) < 0) { Py_DECREF(v); goto err; } @@ -556,8 +556,7 @@ static int w_init_refs(WFILE *wf, int version) { if (version >= 3) { - wf->hashtable = _Py_hashtable_new_full(sizeof(int), 0, - _Py_hashtable_hash_ptr, + wf->hashtable = _Py_hashtable_new_full(_Py_hashtable_hash_ptr, _Py_hashtable_compare_direct, w_decref_entry, NULL, NULL); if (wf->hashtable == NULL) { From webhook-mailer at python.org Tue May 12 23:36:31 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 13 May 2020 03:36:31 -0000 Subject: [Python-checkins] bpo-40602: Optimize _Py_hashtable_get_ptr() (GH-20066) Message-ID: https://github.com/python/cpython/commit/42bae3a3d9d79f28e6b3b619bd27296d125c4c2c commit: 42bae3a3d9d79f28e6b3b619bd27296d125c4c2c branch: master author: Victor Stinner committer: GitHub date: 2020-05-13T05:36:23+02:00 summary: bpo-40602: Optimize _Py_hashtable_get_ptr() (GH-20066) _Py_hashtable_get_entry_ptr() avoids comparing the entry hash: compare directly keys. Move _Py_hashtable_get_entry_ptr() just after _Py_hashtable_get_entry_generic(). 
files: M Python/hashtable.c diff --git a/Python/hashtable.c b/Python/hashtable.c index dc4af3395181c..d1467ad94ed55 100644 --- a/Python/hashtable.c +++ b/Python/hashtable.c @@ -193,6 +193,29 @@ _Py_hashtable_get_entry_generic(_Py_hashtable_t *ht, const void *key) } +// Specialized for: +// hash_func == _Py_hashtable_hash_ptr +// compare_func == _Py_hashtable_compare_direct +static _Py_hashtable_entry_t * +_Py_hashtable_get_entry_ptr(_Py_hashtable_t *ht, const void *key) +{ + Py_uhash_t key_hash = _Py_hashtable_hash_ptr(key); + size_t index = key_hash & (ht->num_buckets - 1); + _Py_hashtable_entry_t *entry = entry = TABLE_HEAD(ht, index); + while (1) { + if (entry == NULL) { + return NULL; + } + // Compare directly keys (ignore entry->key_hash) + if (entry->key == key) { + break; + } + entry = ENTRY_NEXT(entry); + } + return entry; +} + + void* _Py_hashtable_steal(_Py_hashtable_t *ht, const void *key) { @@ -275,30 +298,6 @@ _Py_hashtable_get(_Py_hashtable_t *ht, const void *key) } -// Specialized for: -// hash_func == _Py_hashtable_hash_ptr -// compare_func == _Py_hashtable_compare_direct -_Py_hashtable_entry_t * -_Py_hashtable_get_entry_ptr(_Py_hashtable_t *ht, const void *key) -{ - Py_uhash_t key_hash = _Py_hashtable_hash_ptr(key); - size_t index = key_hash & (ht->num_buckets - 1); - _Py_hashtable_entry_t *entry = entry = TABLE_HEAD(ht, index); - while (1) { - if (entry == NULL) { - return NULL; - } - if (entry->key_hash == key_hash) { - if (entry->key == key) { - break; - } - } - entry = ENTRY_NEXT(entry); - } - return entry; -} - - int _Py_hashtable_foreach(_Py_hashtable_t *ht, _Py_hashtable_foreach_func func, From webhook-mailer at python.org Wed May 13 06:29:57 2020 From: webhook-mailer at python.org (Tzanetos Balitsaris) Date: Wed, 13 May 2020 10:29:57 -0000 Subject: [Python-checkins] bpo-40331: Increase test coverage for the statistics module (GH-19608) Message-ID: https://github.com/python/cpython/commit/b809717c1ead26b4e3693b8a5505dd8f8f666f08 commit: 
b809717c1ead26b4e3693b8a5505dd8f8f666f08 branch: master author: Tzanetos Balitsaris committer: GitHub date: 2020-05-13T13:29:31+03:00 summary: bpo-40331: Increase test coverage for the statistics module (GH-19608) files: M Lib/test/test_statistics.py diff --git a/Lib/test/test_statistics.py b/Lib/test/test_statistics.py index 0e46a7119f0ef..5c3b1fdd8b110 100644 --- a/Lib/test/test_statistics.py +++ b/Lib/test/test_statistics.py @@ -1004,6 +1004,10 @@ def test_nan(self): x = statistics._convert(nan, type(nan)) self.assertTrue(_nan_equal(x, nan)) + def test_invalid_input_type(self): + with self.assertRaises(TypeError): + statistics._convert(None, float) + class FailNegTest(unittest.TestCase): """Test _fail_neg private function.""" @@ -1033,6 +1037,50 @@ def test_error_msg(self): self.assertEqual(errmsg, msg) +class FindLteqTest(unittest.TestCase): + # Test _find_lteq private function. + + def test_invalid_input_values(self): + for a, x in [ + ([], 1), + ([1, 2], 3), + ([1, 3], 2) + ]: + with self.subTest(a=a, x=x): + with self.assertRaises(ValueError): + statistics._find_lteq(a, x) + + def test_locate_successfully(self): + for a, x, expected_i in [ + ([1, 1, 1, 2, 3], 1, 0), + ([0, 1, 1, 1, 2, 3], 1, 1), + ([1, 2, 3, 3, 3], 3, 2) + ]: + with self.subTest(a=a, x=x): + self.assertEqual(expected_i, statistics._find_lteq(a, x)) + + +class FindRteqTest(unittest.TestCase): + # Test _find_rteq private function. 
+ + def test_invalid_input_values(self): + for a, l, x in [ + ([1], 2, 1), + ([1, 3], 0, 2) + ]: + with self.assertRaises(ValueError): + statistics._find_rteq(a, l, x) + + def test_locate_successfully(self): + for a, l, x, expected_i in [ + ([1, 1, 1, 2, 3], 0, 1, 2), + ([0, 1, 1, 1, 2, 3], 0, 1, 3), + ([1, 2, 3, 3, 3], 0, 3, 4) + ]: + with self.subTest(a=a, l=l, x=x): + self.assertEqual(expected_i, statistics._find_rteq(a, l, x)) + + # === Tests for public functions === class UnivariateCommonMixin: @@ -1476,6 +1524,18 @@ def test_negative_error(self): with self.subTest(values=values): self.assertRaises(exc, self.func, values) + def test_invalid_type_error(self): + # Test error is raised when input contains invalid type(s) + for data in [ + ['3.14'], # single string + ['1', '2', '3'], # multiple strings + [1, '2', 3, '4', 5], # mixed strings and valid integers + [2.3, 3.4, 4.5, '5.6'] # only one string and valid floats + ]: + with self.subTest(data=data): + with self.assertRaises(TypeError): + self.func(data) + def test_ints(self): # Test harmonic mean with ints. 
data = [2, 4, 4, 8, 16, 16] From webhook-mailer at python.org Wed May 13 09:38:36 2020 From: webhook-mailer at python.org (Dong-hee Na) Date: Wed, 13 May 2020 13:38:36 -0000 Subject: [Python-checkins] bpo-40613: Remove compiler warning from _xxsubinterpretersmodule (GH-20069) Message-ID: https://github.com/python/cpython/commit/fa0a66e62d087765dbc5c1b89d6149a23ecfb0a6 commit: fa0a66e62d087765dbc5c1b89d6149a23ecfb0a6 branch: master author: Dong-hee Na committer: GitHub date: 2020-05-13T22:38:27+09:00 summary: bpo-40613: Remove compiler warning from _xxsubinterpretersmodule (GH-20069) files: M Modules/_xxsubinterpretersmodule.c diff --git a/Modules/_xxsubinterpretersmodule.c b/Modules/_xxsubinterpretersmodule.c index 9c5df16e156a1..18dd8918e7c89 100644 --- a/Modules/_xxsubinterpretersmodule.c +++ b/Modules/_xxsubinterpretersmodule.c @@ -329,6 +329,7 @@ _objsnapshot_clear(_objsnapshot *osn) // PyMem_Free(osn); //} +#ifndef NDEBUG static int _objsnapshot_is_clear(_objsnapshot *osn) { @@ -336,6 +337,7 @@ _objsnapshot_is_clear(_objsnapshot *osn) && _rawstring_is_clear(&osn->modname) && _rawstring_is_clear(&osn->clsname); } +#endif static void _objsnapshot_summarize(_objsnapshot *osn, _rawstring *rawbuf, const char *msg) @@ -597,6 +599,7 @@ _tbsnapshot_free(_tbsnapshot *tbs) PyMem_Free(tbs); } +#ifndef NDEBUG static int _tbsnapshot_is_clear(_tbsnapshot *tbs) { @@ -604,6 +607,7 @@ _tbsnapshot_is_clear(_tbsnapshot *tbs) && _rawstring_is_clear(&tbs->tbs_funcname) && _rawstring_is_clear(&tbs->tbs_filename); } +#endif static int _tbsnapshot_from_pytb(_tbsnapshot *tbs, PyTracebackObject *pytb) @@ -748,6 +752,7 @@ _excsnapshot_free(_excsnapshot *es) PyMem_Free(es); } +#ifndef NDEBUG static int _excsnapshot_is_clear(_excsnapshot *es) { @@ -758,6 +763,7 @@ _excsnapshot_is_clear(_excsnapshot *es) && es->es_msg == NULL && _objsnapshot_is_clear(&es->es_object); } +#endif static PyObject * _excsnapshot_get_exc_naive(_excsnapshot *es) @@ -1085,6 +1091,7 @@ 
_sharedexception_free(_sharedexception *she) PyMem_Free(she); } +#ifndef NDEBUG static int _sharedexception_is_clear(_sharedexception *she) { @@ -1092,6 +1099,7 @@ _sharedexception_is_clear(_sharedexception *she) && _excsnapshot_is_clear(&she->snapshot) && _rawstring_is_clear(&she->msg); } +#endif static PyObject * _sharedexception_get_cause(_sharedexception *sharedexc) From webhook-mailer at python.org Wed May 13 14:55:17 2020 From: webhook-mailer at python.org (jack1142) Date: Wed, 13 May 2020 18:55:17 -0000 Subject: [Python-checkins] bpo-34790: add version of removal of explicit passing of coros to `asyncio.wait`'s documentation (#20008) Message-ID: https://github.com/python/cpython/commit/de92769d473d1c0955d36da2fc71462621326f00 commit: de92769d473d1c0955d36da2fc71462621326f00 branch: master author: jack1142 <6032823+jack1142 at users.noreply.github.com> committer: GitHub date: 2020-05-13T11:55:12-07:00 summary: bpo-34790: add version of removal of explicit passing of coros to `asyncio.wait`'s documentation (#20008) files: A Misc/NEWS.d/next/Documentation/2020-05-08-20-18-55.bpo-34790.t6kW_1.rst M Doc/library/asyncio-task.rst diff --git a/Doc/library/asyncio-task.rst b/Doc/library/asyncio-task.rst index 6627bec79823a..42e2b4e2fc5b9 100644 --- a/Doc/library/asyncio-task.rst +++ b/Doc/library/asyncio-task.rst @@ -575,7 +575,7 @@ Waiting Primitives if task in done: # Everything will work as expected now. - .. deprecated:: 3.8 + .. deprecated-removed:: 3.8 3.11 Passing coroutine objects to ``wait()`` directly is deprecated. 
diff --git a/Misc/NEWS.d/next/Documentation/2020-05-08-20-18-55.bpo-34790.t6kW_1.rst b/Misc/NEWS.d/next/Documentation/2020-05-08-20-18-55.bpo-34790.t6kW_1.rst new file mode 100644 index 0000000000000..4f349adff3346 --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2020-05-08-20-18-55.bpo-34790.t6kW_1.rst @@ -0,0 +1 @@ +Add version of removal for explicit passing of coros to `asyncio.wait()`'s documentation \ No newline at end of file From webhook-mailer at python.org Wed May 13 15:36:40 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Wed, 13 May 2020 19:36:40 -0000 Subject: [Python-checkins] bpo-40334: Always show the caret on SyntaxErrors (GH-20050) Message-ID: https://github.com/python/cpython/commit/a15c9b3a0524e5ca0434d2ad11076677824af941 commit: a15c9b3a0524e5ca0434d2ad11076677824af941 branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-05-13T20:36:27+01:00 summary: bpo-40334: Always show the caret on SyntaxErrors (GH-20050) This commit fixes SyntaxError locations when the caret is not displayed, by doing the following: - `col_number` always gets set to the location of the offending node/expr. When no caret is to be displayed, this gets achieved by setting the object holding the error line to None. - Introduce a new function `_PyPegen_raise_error_known_location`, which can be called, when an arbitrary `lineno`/`col_offset` needs to be passed. This function then gets used in the grammar (through some new macros and inline functions) so that SyntaxError locations of the new parser match that of the old. 
files: M Grammar/python.gram M Lib/test/test_exceptions.py M Parser/pegen/parse.c M Parser/pegen/pegen.c M Parser/pegen/pegen.h diff --git a/Grammar/python.gram b/Grammar/python.gram index 0542107cac3e6..84c89330e3ee9 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -623,26 +623,31 @@ t_atom[expr_ty]: # From here on, there are rules for invalid syntax with specialised error messages incorrect_arguments: | args ',' '*' { RAISE_SYNTAX_ERROR("iterable argument unpacking follows keyword argument unpacking") } - | expression for_if_clauses ',' [args | expression for_if_clauses] { - RAISE_SYNTAX_ERROR("Generator expression must be parenthesized") } + | a=expression for_if_clauses ',' [args | expression for_if_clauses] { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "Generator expression must be parenthesized") } | a=args ',' args { _PyPegen_arguments_parsing_error(p, a) } invalid_kwarg: - | expression '=' { RAISE_SYNTAX_ERROR("expression cannot contain assignment, perhaps you meant \"==\"?") } + | a=expression '=' { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION( + a, "expression cannot contain assignment, perhaps you meant \"==\"?") } invalid_named_expression: | a=expression ':=' expression { - RAISE_SYNTAX_ERROR("cannot use assignment expressions with %s", _PyPegen_get_expr_name(a)) } + RAISE_SYNTAX_ERROR_KNOWN_LOCATION( + a, "cannot use assignment expressions with %s", _PyPegen_get_expr_name(a)) } invalid_assignment: - | list ':' { RAISE_SYNTAX_ERROR("only single target (not list) can be annotated") } - | tuple ':' { RAISE_SYNTAX_ERROR("only single target (not tuple) can be annotated") } - | expression ':' expression ['=' annotated_rhs] { - RAISE_SYNTAX_ERROR("illegal target for annotation") } + | a=list ':' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "only single target (not list) can be annotated") } + | a=tuple ':' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "only single target (not tuple) can be annotated") } + | a=star_named_expression ',' star_named_expressions* ':' { + 
RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "only single target (not tuple) can be annotated") } + | a=expression ':' expression ['=' annotated_rhs] { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "illegal target for annotation") } | a=expression ('=' | augassign) (yield_expr | star_expressions) { - RAISE_SYNTAX_ERROR_NO_COL_OFFSET("cannot assign to %s", _PyPegen_get_expr_name(a)) } + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "cannot assign to %s", _PyPegen_get_expr_name(a)) } invalid_block: | NEWLINE !INDENT { RAISE_INDENTATION_ERROR("expected an indented block") } invalid_comprehension: - | ('[' | '(' | '{') '*' expression for_if_clauses { - RAISE_SYNTAX_ERROR("iterable unpacking cannot be used in comprehension") } + | ('[' | '(' | '{') a=starred_expression for_if_clauses { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "iterable unpacking cannot be used in comprehension") } invalid_parameters: | param_no_default* (slash_with_default | param_with_default+) param_no_default { RAISE_SYNTAX_ERROR("non-default argument follows default argument") } @@ -655,4 +660,4 @@ invalid_double_type_comments: RAISE_SYNTAX_ERROR("Cannot have two type comments on def") } invalid_del_target: | a=star_expression &del_target_end { - RAISE_SYNTAX_ERROR("cannot delete %s", _PyPegen_get_expr_name(a)) } + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "cannot delete %s", _PyPegen_get_expr_name(a)) } diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py index dbd7fa6bdd938..b689ec7aed18d 100644 --- a/Lib/test/test_exceptions.py +++ b/Lib/test/test_exceptions.py @@ -242,16 +242,13 @@ def baz(): check('from __future__ import doesnt_exist', 1, 1) check('from __future__ import braces', 1, 1) check('x=1\nfrom __future__ import division', 2, 1) - check('(yield i) = 2', 1, 1) + check('foo(1=2)', 1, 5) + check('def f():\n x, y: int', 2, 3) + check('[*x for x in xs]', 1, 2) + check('foo(x for x in range(10), 100)', 1, 5) + check('(yield i) = 2', 1, 1 if support.use_old_parser() else 2) check('def f(*):\n pass', 1, 7 
if support.use_old_parser() else 8) - check('foo(1=2)', 1, 5 if support.use_old_parser() else 6) - - @support.skip_if_new_parser("Pegen column offsets might be different") - def testSyntaxErrorOffsetCustom(self): - self.check('for 1 in []: pass', 1, 5) - self.check('[*x for x in xs]', 1, 2) - self.check('def f():\n x, y: int', 2, 3) - self.check('foo(x for x in range(10), 100)', 1, 5) + check('for 1 in []: pass', 1, 5 if support.use_old_parser() else 7) @cpython_only def testSettingException(self): diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index a1a6f4c06bf63..b1b248187ea3e 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -346,12 +346,12 @@ static KeywordToken *reserved_keywords[] = { #define _loop0_124_type 1275 #define _gather_123_type 1276 #define _tmp_125_type 1277 -#define _tmp_126_type 1278 +#define _loop0_126_type 1278 #define _tmp_127_type 1279 #define _tmp_128_type 1280 #define _tmp_129_type 1281 -#define _loop0_130_type 1282 -#define _tmp_131_type 1283 +#define _tmp_130_type 1282 +#define _loop0_131_type 1283 #define _tmp_132_type 1284 #define _tmp_133_type 1285 #define _tmp_134_type 1286 @@ -365,9 +365,10 @@ static KeywordToken *reserved_keywords[] = { #define _tmp_142_type 1294 #define _tmp_143_type 1295 #define _tmp_144_type 1296 -#define _loop1_145_type 1297 -#define _tmp_146_type 1298 +#define _tmp_145_type 1297 +#define _loop1_146_type 1298 #define _tmp_147_type 1299 +#define _tmp_148_type 1300 static mod_ty file_rule(Parser *p); static mod_ty interactive_rule(Parser *p); @@ -647,12 +648,12 @@ static asdl_seq *_gather_121_rule(Parser *p); static asdl_seq *_loop0_124_rule(Parser *p); static asdl_seq *_gather_123_rule(Parser *p); static void *_tmp_125_rule(Parser *p); -static void *_tmp_126_rule(Parser *p); +static asdl_seq *_loop0_126_rule(Parser *p); static void *_tmp_127_rule(Parser *p); static void *_tmp_128_rule(Parser *p); static void *_tmp_129_rule(Parser *p); -static asdl_seq *_loop0_130_rule(Parser *p); -static 
void *_tmp_131_rule(Parser *p); +static void *_tmp_130_rule(Parser *p); +static asdl_seq *_loop0_131_rule(Parser *p); static void *_tmp_132_rule(Parser *p); static void *_tmp_133_rule(Parser *p); static void *_tmp_134_rule(Parser *p); @@ -666,9 +667,10 @@ static void *_tmp_141_rule(Parser *p); static void *_tmp_142_rule(Parser *p); static void *_tmp_143_rule(Parser *p); static void *_tmp_144_rule(Parser *p); -static asdl_seq *_loop1_145_rule(Parser *p); -static void *_tmp_146_rule(Parser *p); +static void *_tmp_145_rule(Parser *p); +static asdl_seq *_loop1_146_rule(Parser *p); static void *_tmp_147_rule(Parser *p); +static void *_tmp_148_rule(Parser *p); // file: statements? $ @@ -10629,10 +10631,10 @@ incorrect_arguments_rule(Parser *p) Token * _literal; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings - expr_ty expression_var; + expr_ty a; asdl_seq* for_if_clauses_var; if ( - (expression_var = expression_rule(p)) // expression + (a = expression_rule(p)) // expression && (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses && @@ -10641,7 +10643,7 @@ incorrect_arguments_rule(Parser *p) (_opt_var = _tmp_125_rule(p), 1) // [args | expression for_if_clauses] ) { - _res = RAISE_SYNTAX_ERROR ( "Generator expression must be parenthesized" ); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "Generator expression must be parenthesized" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; @@ -10687,14 +10689,14 @@ invalid_kwarg_rule(Parser *p) int _mark = p->mark; { // expression '=' Token * _literal; - expr_ty expression_var; + expr_ty a; if ( - (expression_var = expression_rule(p)) // expression + (a = expression_rule(p)) // expression && (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - _res = RAISE_SYNTAX_ERROR ( "expression cannot contain assignment, perhaps you meant \"==\"?" ); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "expression cannot contain assignment, perhaps you meant \"==\"?" 
); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; @@ -10729,7 +10731,7 @@ invalid_named_expression_rule(Parser *p) (expression_var = expression_rule(p)) // expression ) { - _res = RAISE_SYNTAX_ERROR ( "cannot use assignment expressions with %s" , _PyPegen_get_expr_name ( a ) ); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "cannot use assignment expressions with %s" , _PyPegen_get_expr_name ( a ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; @@ -10746,6 +10748,7 @@ invalid_named_expression_rule(Parser *p) // invalid_assignment: // | list ':' // | tuple ':' +// | star_named_expression ',' star_named_expressions* ':' // | expression ':' expression ['=' annotated_rhs] // | expression ('=' | augassign) (yield_expr | star_expressions) static void * @@ -10758,14 +10761,14 @@ invalid_assignment_rule(Parser *p) int _mark = p->mark; { // list ':' Token * _literal; - expr_ty list_var; + expr_ty a; if ( - (list_var = list_rule(p)) // list + (a = list_rule(p)) // list && (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - _res = RAISE_SYNTAX_ERROR ( "only single target (not list) can be annotated" ); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "only single target (not list) can be annotated" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; @@ -10776,14 +10779,38 @@ invalid_assignment_rule(Parser *p) } { // tuple ':' Token * _literal; - expr_ty tuple_var; + expr_ty a; if ( - (tuple_var = tuple_rule(p)) // tuple + (a = tuple_rule(p)) // tuple && (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - _res = RAISE_SYNTAX_ERROR ( "only single target (not tuple) can be annotated" ); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "only single target (not tuple) can be annotated" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + return NULL; + } + goto done; + } + p->mark = _mark; + } + { // star_named_expression ',' star_named_expressions* ':' 
+ Token * _literal; + Token * _literal_1; + asdl_seq * _loop0_126_var; + expr_ty a; + if ( + (a = star_named_expression_rule(p)) // star_named_expression + && + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (_loop0_126_var = _loop0_126_rule(p)) // star_named_expressions* + && + (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' + ) + { + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "only single target (not tuple) can be annotated" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; @@ -10796,19 +10823,19 @@ invalid_assignment_rule(Parser *p) Token * _literal; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings + expr_ty a; expr_ty expression_var; - expr_ty expression_var_1; if ( - (expression_var = expression_rule(p)) // expression + (a = expression_rule(p)) // expression && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (expression_var_1 = expression_rule(p)) // expression + (expression_var = expression_rule(p)) // expression && - (_opt_var = _tmp_126_rule(p), 1) // ['=' annotated_rhs] + (_opt_var = _tmp_127_rule(p), 1) // ['=' annotated_rhs] ) { - _res = RAISE_SYNTAX_ERROR ( "illegal target for annotation" ); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "illegal target for annotation" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; @@ -10818,18 +10845,18 @@ invalid_assignment_rule(Parser *p) p->mark = _mark; } { // expression ('=' | augassign) (yield_expr | star_expressions) - void *_tmp_127_var; void *_tmp_128_var; + void *_tmp_129_var; expr_ty a; if ( (a = expression_rule(p)) // expression && - (_tmp_127_var = _tmp_127_rule(p)) // '=' | augassign + (_tmp_128_var = _tmp_128_rule(p)) // '=' | augassign && - (_tmp_128_var = _tmp_128_rule(p)) // yield_expr | star_expressions + (_tmp_129_var = _tmp_129_rule(p)) // yield_expr | star_expressions ) { - _res = RAISE_SYNTAX_ERROR_NO_COL_OFFSET ( "cannot assign to %s" , _PyPegen_get_expr_name ( a ) ); + _res = 
RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "cannot assign to %s" , _PyPegen_get_expr_name ( a ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; @@ -10874,7 +10901,7 @@ invalid_block_rule(Parser *p) return _res; } -// invalid_comprehension: ('[' | '(' | '{') '*' expression for_if_clauses +// invalid_comprehension: ('[' | '(' | '{') starred_expression for_if_clauses static void * invalid_comprehension_rule(Parser *p) { @@ -10883,22 +10910,19 @@ invalid_comprehension_rule(Parser *p) } void * _res = NULL; int _mark = p->mark; - { // ('[' | '(' | '{') '*' expression for_if_clauses - Token * _literal; - void *_tmp_129_var; - expr_ty expression_var; + { // ('[' | '(' | '{') starred_expression for_if_clauses + void *_tmp_130_var; + expr_ty a; asdl_seq* for_if_clauses_var; if ( - (_tmp_129_var = _tmp_129_rule(p)) // '[' | '(' | '{' - && - (_literal = _PyPegen_expect_token(p, 16)) // token='*' + (_tmp_130_var = _tmp_130_rule(p)) // '[' | '(' | '{' && - (expression_var = expression_rule(p)) // expression + (a = starred_expression_rule(p)) // starred_expression && (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses ) { - _res = RAISE_SYNTAX_ERROR ( "iterable unpacking cannot be used in comprehension" ); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "iterable unpacking cannot be used in comprehension" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; @@ -10923,13 +10947,13 @@ invalid_parameters_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // param_no_default* (slash_with_default | param_with_default+) param_no_default - asdl_seq * _loop0_130_var; - void *_tmp_131_var; + asdl_seq * _loop0_131_var; + void *_tmp_132_var; arg_ty param_no_default_var; if ( - (_loop0_130_var = _loop0_130_rule(p)) // param_no_default* + (_loop0_131_var = _loop0_131_rule(p)) // param_no_default* && - (_tmp_131_var = _tmp_131_rule(p)) // slash_with_default | param_with_default+ + (_tmp_132_var = _tmp_132_rule(p)) // 
slash_with_default | param_with_default+ && (param_no_default_var = param_no_default_rule(p)) // param_no_default ) @@ -10959,11 +10983,11 @@ invalid_star_etc_rule(Parser *p) int _mark = p->mark; { // '*' (')' | ',' (')' | '**')) Token * _literal; - void *_tmp_132_var; + void *_tmp_133_var; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_132_var = _tmp_132_rule(p)) // ')' | ',' (')' | '**') + (_tmp_133_var = _tmp_133_rule(p)) // ')' | ',' (')' | '**') ) { _res = RAISE_SYNTAX_ERROR ( "named arguments must follow bare *" ); @@ -10991,11 +11015,11 @@ invalid_lambda_star_etc_rule(Parser *p) int _mark = p->mark; { // '*' (':' | ',' (':' | '**')) Token * _literal; - void *_tmp_133_var; + void *_tmp_134_var; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_133_var = _tmp_133_rule(p)) // ':' | ',' (':' | '**') + (_tmp_134_var = _tmp_134_rule(p)) // ':' | ',' (':' | '**') ) { _res = RAISE_SYNTAX_ERROR ( "named arguments must follow bare *" ); @@ -11070,7 +11094,7 @@ invalid_del_target_rule(Parser *p) _PyPegen_lookahead(1, del_target_end_rule, p) ) { - _res = RAISE_SYNTAX_ERROR ( "cannot delete %s" , _PyPegen_get_expr_name ( a ) ); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "cannot delete %s" , _PyPegen_get_expr_name ( a ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; @@ -11979,12 +12003,12 @@ _loop1_22_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // (star_targets '=') - void *_tmp_134_var; + void *_tmp_135_var; while ( - (_tmp_134_var = _tmp_134_rule(p)) // star_targets '=' + (_tmp_135_var = _tmp_135_rule(p)) // star_targets '=' ) { - _res = _tmp_134_var; + _res = _tmp_135_var; if (_n == _children_capacity) { _children_capacity *= 2; _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -12306,12 +12330,12 @@ _loop0_30_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ('.' 
| '...') - void *_tmp_135_var; + void *_tmp_136_var; while ( - (_tmp_135_var = _tmp_135_rule(p)) // '.' | '...' + (_tmp_136_var = _tmp_136_rule(p)) // '.' | '...' ) { - _res = _tmp_135_var; + _res = _tmp_136_var; if (_n == _children_capacity) { _children_capacity *= 2; _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -12355,12 +12379,12 @@ _loop1_31_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ('.' | '...') - void *_tmp_136_var; + void *_tmp_137_var; while ( - (_tmp_136_var = _tmp_136_rule(p)) // '.' | '...' + (_tmp_137_var = _tmp_137_rule(p)) // '.' | '...' ) { - _res = _tmp_136_var; + _res = _tmp_137_var; if (_n == _children_capacity) { _children_capacity *= 2; _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -13937,12 +13961,12 @@ _loop1_67_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ('@' named_expression NEWLINE) - void *_tmp_137_var; + void *_tmp_138_var; while ( - (_tmp_137_var = _tmp_137_rule(p)) // '@' named_expression NEWLINE + (_tmp_138_var = _tmp_138_rule(p)) // '@' named_expression NEWLINE ) { - _res = _tmp_137_var; + _res = _tmp_138_var; if (_n == _children_capacity) { _children_capacity *= 2; _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -14110,12 +14134,12 @@ _loop1_71_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // (',' star_expression) - void *_tmp_138_var; + void *_tmp_139_var; while ( - (_tmp_138_var = _tmp_138_rule(p)) // ',' star_expression + (_tmp_139_var = _tmp_139_rule(p)) // ',' star_expression ) { - _res = _tmp_138_var; + _res = _tmp_139_var; if (_n == _children_capacity) { _children_capacity *= 2; _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -14248,12 +14272,12 @@ _loop1_74_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // (',' expression) - void *_tmp_139_var; + void *_tmp_140_var; while ( - (_tmp_139_var = _tmp_139_rule(p)) // ',' 
expression + (_tmp_140_var = _tmp_140_rule(p)) // ',' expression ) { - _res = _tmp_139_var; + _res = _tmp_140_var; if (_n == _children_capacity) { _children_capacity *= 2; _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -15015,12 +15039,12 @@ _loop1_89_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ('or' conjunction) - void *_tmp_140_var; + void *_tmp_141_var; while ( - (_tmp_140_var = _tmp_140_rule(p)) // 'or' conjunction + (_tmp_141_var = _tmp_141_rule(p)) // 'or' conjunction ) { - _res = _tmp_140_var; + _res = _tmp_141_var; if (_n == _children_capacity) { _children_capacity *= 2; _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -15068,12 +15092,12 @@ _loop1_90_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ('and' inversion) - void *_tmp_141_var; + void *_tmp_142_var; while ( - (_tmp_141_var = _tmp_141_rule(p)) // 'and' inversion + (_tmp_142_var = _tmp_142_rule(p)) // 'and' inversion ) { - _res = _tmp_141_var; + _res = _tmp_142_var; if (_n == _children_capacity) { _children_capacity *= 2; _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -15723,12 +15747,12 @@ _loop0_105_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ('if' disjunction) - void *_tmp_142_var; + void *_tmp_143_var; while ( - (_tmp_142_var = _tmp_142_rule(p)) // 'if' disjunction + (_tmp_143_var = _tmp_143_rule(p)) // 'if' disjunction ) { - _res = _tmp_142_var; + _res = _tmp_143_var; if (_n == _children_capacity) { _children_capacity *= 2; _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -15772,12 +15796,12 @@ _loop0_106_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ('if' disjunction) - void *_tmp_143_var; + void *_tmp_144_var; while ( - (_tmp_143_var = _tmp_143_rule(p)) // 'if' disjunction + (_tmp_144_var = _tmp_144_rule(p)) // 'if' disjunction ) { - _res = _tmp_143_var; + _res = _tmp_144_var; if (_n == 
_children_capacity) { _children_capacity *= 2; _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -16225,12 +16249,12 @@ _loop0_117_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // (',' star_target) - void *_tmp_144_var; + void *_tmp_145_var; while ( - (_tmp_144_var = _tmp_144_rule(p)) // ',' star_target + (_tmp_145_var = _tmp_145_rule(p)) // ',' star_target ) { - _res = _tmp_144_var; + _res = _tmp_145_var; if (_n == _children_capacity) { _children_capacity *= 2; _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -16577,9 +16601,58 @@ _tmp_125_rule(Parser *p) return _res; } -// _tmp_126: '=' annotated_rhs +// _loop0_126: star_named_expressions +static asdl_seq * +_loop0_126_rule(Parser *p) +{ + if (p->error_indicator) { + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // star_named_expressions + asdl_seq* star_named_expressions_var; + while ( + (star_named_expressions_var = star_named_expressions_rule(p)) // star_named_expressions + ) + { + _res = star_named_expressions_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { + PyErr_Format(PyExc_MemoryError, "realloc None"); + return NULL; + } + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_126"); + PyMem_Free(_children); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_126_type, _seq); + return _seq; +} + +// _tmp_127: '=' annotated_rhs static 
void * -_tmp_126_rule(Parser *p) +_tmp_127_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16605,9 +16678,9 @@ _tmp_126_rule(Parser *p) return _res; } -// _tmp_127: '=' | augassign +// _tmp_128: '=' | augassign static void * -_tmp_127_rule(Parser *p) +_tmp_128_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16641,9 +16714,9 @@ _tmp_127_rule(Parser *p) return _res; } -// _tmp_128: yield_expr | star_expressions +// _tmp_129: yield_expr | star_expressions static void * -_tmp_128_rule(Parser *p) +_tmp_129_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16677,9 +16750,9 @@ _tmp_128_rule(Parser *p) return _res; } -// _tmp_129: '[' | '(' | '{' +// _tmp_130: '[' | '(' | '{' static void * -_tmp_129_rule(Parser *p) +_tmp_130_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16724,9 +16797,9 @@ _tmp_129_rule(Parser *p) return _res; } -// _loop0_130: param_no_default +// _loop0_131: param_no_default static asdl_seq * -_loop0_130_rule(Parser *p) +_loop0_131_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16763,19 +16836,19 @@ _loop0_130_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_130"); + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_131"); PyMem_Free(_children); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_130_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_131_type, _seq); return _seq; } -// _tmp_131: slash_with_default | param_with_default+ +// _tmp_132: slash_with_default | param_with_default+ static void * -_tmp_131_rule(Parser *p) +_tmp_132_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16794,12 +16867,12 @@ _tmp_131_rule(Parser *p) p->mark = _mark; } { // param_with_default+ - asdl_seq * _loop1_145_var; + asdl_seq * _loop1_146_var; if ( - (_loop1_145_var = _loop1_145_rule(p)) // 
param_with_default+ + (_loop1_146_var = _loop1_146_rule(p)) // param_with_default+ ) { - _res = _loop1_145_var; + _res = _loop1_146_var; goto done; } p->mark = _mark; @@ -16809,9 +16882,9 @@ _tmp_131_rule(Parser *p) return _res; } -// _tmp_132: ')' | ',' (')' | '**') +// _tmp_133: ')' | ',' (')' | '**') static void * -_tmp_132_rule(Parser *p) +_tmp_133_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16831,14 +16904,14 @@ _tmp_132_rule(Parser *p) } { // ',' (')' | '**') Token * _literal; - void *_tmp_146_var; + void *_tmp_147_var; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_tmp_146_var = _tmp_146_rule(p)) // ')' | '**' + (_tmp_147_var = _tmp_147_rule(p)) // ')' | '**' ) { - _res = _PyPegen_dummy_name(p, _literal, _tmp_146_var); + _res = _PyPegen_dummy_name(p, _literal, _tmp_147_var); goto done; } p->mark = _mark; @@ -16848,9 +16921,9 @@ _tmp_132_rule(Parser *p) return _res; } -// _tmp_133: ':' | ',' (':' | '**') +// _tmp_134: ':' | ',' (':' | '**') static void * -_tmp_133_rule(Parser *p) +_tmp_134_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16870,14 +16943,14 @@ _tmp_133_rule(Parser *p) } { // ',' (':' | '**') Token * _literal; - void *_tmp_147_var; + void *_tmp_148_var; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_tmp_147_var = _tmp_147_rule(p)) // ':' | '**' + (_tmp_148_var = _tmp_148_rule(p)) // ':' | '**' ) { - _res = _PyPegen_dummy_name(p, _literal, _tmp_147_var); + _res = _PyPegen_dummy_name(p, _literal, _tmp_148_var); goto done; } p->mark = _mark; @@ -16887,9 +16960,9 @@ _tmp_133_rule(Parser *p) return _res; } -// _tmp_134: star_targets '=' +// _tmp_135: star_targets '=' static void * -_tmp_134_rule(Parser *p) +_tmp_135_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16919,9 +16992,9 @@ _tmp_134_rule(Parser *p) return _res; } -// _tmp_135: '.' | '...' +// _tmp_136: '.' | '...' 
static void * -_tmp_135_rule(Parser *p) +_tmp_136_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16955,9 +17028,9 @@ _tmp_135_rule(Parser *p) return _res; } -// _tmp_136: '.' | '...' +// _tmp_137: '.' | '...' static void * -_tmp_136_rule(Parser *p) +_tmp_137_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16991,9 +17064,9 @@ _tmp_136_rule(Parser *p) return _res; } -// _tmp_137: '@' named_expression NEWLINE +// _tmp_138: '@' named_expression NEWLINE static void * -_tmp_137_rule(Parser *p) +_tmp_138_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -17026,9 +17099,9 @@ _tmp_137_rule(Parser *p) return _res; } -// _tmp_138: ',' star_expression +// _tmp_139: ',' star_expression static void * -_tmp_138_rule(Parser *p) +_tmp_139_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -17058,9 +17131,9 @@ _tmp_138_rule(Parser *p) return _res; } -// _tmp_139: ',' expression +// _tmp_140: ',' expression static void * -_tmp_139_rule(Parser *p) +_tmp_140_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -17090,9 +17163,9 @@ _tmp_139_rule(Parser *p) return _res; } -// _tmp_140: 'or' conjunction +// _tmp_141: 'or' conjunction static void * -_tmp_140_rule(Parser *p) +_tmp_141_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -17122,9 +17195,9 @@ _tmp_140_rule(Parser *p) return _res; } -// _tmp_141: 'and' inversion +// _tmp_142: 'and' inversion static void * -_tmp_141_rule(Parser *p) +_tmp_142_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -17154,9 +17227,9 @@ _tmp_141_rule(Parser *p) return _res; } -// _tmp_142: 'if' disjunction +// _tmp_143: 'if' disjunction static void * -_tmp_142_rule(Parser *p) +_tmp_143_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -17186,9 +17259,9 @@ _tmp_142_rule(Parser *p) return _res; } -// _tmp_143: 'if' disjunction +// _tmp_144: 'if' disjunction static void * -_tmp_143_rule(Parser *p) +_tmp_144_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -17218,9 
+17291,9 @@ _tmp_143_rule(Parser *p) return _res; } -// _tmp_144: ',' star_target +// _tmp_145: ',' star_target static void * -_tmp_144_rule(Parser *p) +_tmp_145_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -17250,9 +17323,9 @@ _tmp_144_rule(Parser *p) return _res; } -// _loop1_145: param_with_default +// _loop1_146: param_with_default static asdl_seq * -_loop1_145_rule(Parser *p) +_loop1_146_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -17293,19 +17366,19 @@ _loop1_145_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_145"); + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_146"); PyMem_Free(_children); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_145_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_146_type, _seq); return _seq; } -// _tmp_146: ')' | '**' +// _tmp_147: ')' | '**' static void * -_tmp_146_rule(Parser *p) +_tmp_147_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -17339,9 +17412,9 @@ _tmp_146_rule(Parser *p) return _res; } -// _tmp_147: ':' | '**' +// _tmp_148: ':' | '**' static void * -_tmp_147_rule(Parser *p) +_tmp_148_rule(Parser *p) { if (p->error_indicator) { return NULL; diff --git a/Parser/pegen/pegen.c b/Parser/pegen/pegen.c index 5f8c862c1f88b..083088bd9657b 100644 --- a/Parser/pegen/pegen.c +++ b/Parser/pegen/pegen.c @@ -380,48 +380,57 @@ tokenizer_error(Parser *p) } void * -_PyPegen_raise_error(Parser *p, PyObject *errtype, int with_col_number, const char *errmsg, ...) +_PyPegen_raise_error(Parser *p, PyObject *errtype, const char *errmsg, ...) +{ + Token *t = p->known_err_token != NULL ? 
p->known_err_token : p->tokens[p->fill - 1]; + int col_offset; + if (t->col_offset == -1) { + col_offset = Py_SAFE_DOWNCAST(p->tok->cur - p->tok->buf, + intptr_t, int); + } else { + col_offset = t->col_offset + 1; + } + + va_list va; + va_start(va, errmsg); + _PyPegen_raise_error_known_location(p, errtype, t->lineno, + col_offset, errmsg, va); + va_end(va); + + return NULL; +} + + +void * +_PyPegen_raise_error_known_location(Parser *p, PyObject *errtype, + int lineno, int col_offset, + const char *errmsg, va_list va) { PyObject *value = NULL; PyObject *errstr = NULL; - PyObject *loc = NULL; + PyObject *error_line = NULL; PyObject *tmp = NULL; - Token *t = p->known_err_token != NULL ? p->known_err_token : p->tokens[p->fill - 1]; - Py_ssize_t col_number = !with_col_number; - va_list va; p->error_indicator = 1; - va_start(va, errmsg); errstr = PyUnicode_FromFormatV(errmsg, va); - va_end(va); if (!errstr) { goto error; } if (p->start_rule == Py_file_input) { - loc = PyErr_ProgramTextObject(p->tok->filename, t->lineno); + error_line = PyErr_ProgramTextObject(p->tok->filename, lineno); } - if (!loc) { - loc = get_error_line(p->tok->buf, p->start_rule == Py_file_input); - } - - if (loc && with_col_number) { - int col_offset; - if (t->col_offset == -1) { - col_offset = Py_SAFE_DOWNCAST(p->tok->cur - p->tok->buf, - intptr_t, int); - } else { - col_offset = t->col_offset + 1; + if (!error_line) { + error_line = get_error_line(p->tok->buf, p->start_rule == Py_file_input); + if (!error_line) { + goto error; } - col_number = byte_offset_to_character_offset(loc, col_offset); - } - else if (!loc) { - Py_INCREF(Py_None); - loc = Py_None; } - tmp = Py_BuildValue("(OiiN)", p->tok->filename, t->lineno, col_number, loc); + int col_number = byte_offset_to_character_offset(error_line, col_offset); + + tmp = Py_BuildValue("(OiiN)", p->tok->filename, lineno, col_number, error_line); if (!tmp) { goto error; } @@ -438,7 +447,7 @@ _PyPegen_raise_error(Parser *p, PyObject *errtype, int 
with_col_number, const ch error: Py_XDECREF(errstr); - Py_XDECREF(loc); + Py_XDECREF(error_line); return NULL; } diff --git a/Parser/pegen/pegen.h b/Parser/pegen/pegen.h index b55a652ac8060..e5b1b757bd894 100644 --- a/Parser/pegen/pegen.h +++ b/Parser/pegen/pegen.h @@ -127,15 +127,32 @@ expr_ty _PyPegen_name_token(Parser *p); expr_ty _PyPegen_number_token(Parser *p); void *_PyPegen_string_token(Parser *p); const char *_PyPegen_get_expr_name(expr_ty); -void *_PyPegen_raise_error(Parser *p, PyObject *errtype, int with_col_number, const char *errmsg, ...); +void *_PyPegen_raise_error(Parser *p, PyObject *errtype, const char *errmsg, ...); +void *_PyPegen_raise_error_known_location(Parser *p, PyObject *errtype, + int lineno, int col_offset, + const char *errmsg, va_list va); void *_PyPegen_dummy_name(Parser *p, ...); +Py_LOCAL_INLINE(void *) +RAISE_ERROR_KNOWN_LOCATION(Parser *p, PyObject *errtype, int lineno, + int col_offset, const char *errmsg, ...) +{ + va_list va; + va_start(va, errmsg); + _PyPegen_raise_error_known_location(p, errtype, lineno, col_offset + 1, + errmsg, va); + va_end(va); + return NULL; +} + + #define UNUSED(expr) do { (void)(expr); } while (0) #define EXTRA_EXPR(head, tail) head->lineno, head->col_offset, tail->end_lineno, tail->end_col_offset, p->arena #define EXTRA _start_lineno, _start_col_offset, _end_lineno, _end_col_offset, p->arena -#define RAISE_SYNTAX_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_SyntaxError, 1, msg, ##__VA_ARGS__) -#define RAISE_INDENTATION_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_IndentationError, 1, msg, ##__VA_ARGS__) -#define RAISE_SYNTAX_ERROR_NO_COL_OFFSET(msg, ...) _PyPegen_raise_error(p, PyExc_SyntaxError, 0, msg, ##__VA_ARGS__) +#define RAISE_SYNTAX_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_SyntaxError, msg, ##__VA_ARGS__) +#define RAISE_INDENTATION_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_IndentationError, msg, ##__VA_ARGS__) +#define RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, msg, ...) 
\ + RAISE_ERROR_KNOWN_LOCATION(p, PyExc_SyntaxError, a->lineno, a->col_offset, msg, ##__VA_ARGS__) Py_LOCAL_INLINE(void *) CHECK_CALL(Parser *p, void *result) From webhook-mailer at python.org Wed May 13 18:31:39 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 13 May 2020 22:31:39 -0000 Subject: [Python-checkins] bpo-38787: Fix Argument Clinic defining_class_converter (GH-20074) Message-ID: https://github.com/python/cpython/commit/97e1568325e4d8eff2fc80eeb174b3f3e5d1c350 commit: 97e1568325e4d8eff2fc80eeb174b3f3e5d1c350 branch: master author: Victor Stinner committer: GitHub date: 2020-05-14T00:31:31+02:00 summary: bpo-38787: Fix Argument Clinic defining_class_converter (GH-20074) Don't hardcode defining_class parameter name to "cls": * Define CConverter.set_template_dict(): do nothing by default * CLanguage.render_function() now calls set_template_dict() on all converters. files: M Tools/clinic/clinic.py diff --git a/Tools/clinic/clinic.py b/Tools/clinic/clinic.py index 281a749a935cc..b07ffdd928f15 100755 --- a/Tools/clinic/clinic.py +++ b/Tools/clinic/clinic.py @@ -724,7 +724,7 @@ def output_templates(self, f): parser_prototype_def_class = normalize_snippet(""" static PyObject * - {c_basename}({self_type}{self_name}, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) + {c_basename}({self_type}{self_name}, PyTypeObject *{defining_class_name}, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) """) # parser_body_fields remembers the fields passed in to the @@ -1305,7 +1305,8 @@ def render_function(self, clinic, f): template_dict['docstring'] = self.docstring_for_c_string(f) template_dict['self_name'] = template_dict['self_type'] = template_dict['self_type_check'] = '' - f_self.converter.set_template_dict(template_dict) + for converter in converters: + converter.set_template_dict(template_dict) f.return_converter.render(f, data) template_dict['impl_return_type'] = f.return_converter.type @@ -2698,6 +2699,10 @@ 
def parse_arg(self, argname, displayname): """.format(argname=argname, paramname=self.name, cast=cast) return None + def set_template_dict(self, template_dict): + pass + + type_checks = { '&PyLong_Type': ('PyLong_Check', 'int'), '&PyTuple_Type': ('PyTuple_Check', 'tuple'), From webhook-mailer at python.org Wed May 13 18:42:06 2020 From: webhook-mailer at python.org (Michael Graczyk) Date: Wed, 13 May 2020 22:42:06 -0000 Subject: [Python-checkins] issue-25872: Fix KeyError using linecache from multiple threads (GH-18007) Message-ID: https://github.com/python/cpython/commit/d72ea605218bbee6ae46648997d9bb76d0fba460 commit: d72ea605218bbee6ae46648997d9bb76d0fba460 branch: master author: Michael Graczyk committer: GitHub date: 2020-05-13T18:41:57-04:00 summary: issue-25872: Fix KeyError using linecache from multiple threads (GH-18007) The crash that this fixes occurs when using traceback and other modules from multiple threads; del cache[filename] can raise a KeyError. files: M Lib/linecache.py diff --git a/Lib/linecache.py b/Lib/linecache.py index ddd0abf2cf01d..fa5dbd09eab86 100644 --- a/Lib/linecache.py +++ b/Lib/linecache.py @@ -71,10 +71,10 @@ def checkcache(filename=None): try: stat = os.stat(fullname) except OSError: - del cache[filename] + cache.pop(filename, None) continue if size != stat.st_size or mtime != stat.st_mtime: - del cache[filename] + cache.pop(filename, None) def updatecache(filename, module_globals=None): @@ -84,7 +84,7 @@ def updatecache(filename, module_globals=None): if filename in cache: if len(cache[filename]) != 1: - del cache[filename] + cache.pop(filename, None) if not filename or (filename.startswith('<') and filename.endswith('>')): return [] From webhook-mailer at python.org Wed May 13 19:11:59 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 13 May 2020 23:11:59 -0000 Subject: [Python-checkins] bpo-39465: Remove _PyUnicode_ClearStaticStrings() from C API (GH-20078) Message-ID: 
https://github.com/python/cpython/commit/d6fb53fe42d83a10f1372dd92ffaa6a01d2feffb commit: d6fb53fe42d83a10f1372dd92ffaa6a01d2feffb branch: master author: Victor Stinner committer: GitHub date: 2020-05-14T01:11:54+02:00 summary: bpo-39465: Remove _PyUnicode_ClearStaticStrings() from C API (GH-20078) Remove the _PyUnicode_ClearStaticStrings() function from the C API. Make the function fully private (declare it with "static"). files: A Misc/NEWS.d/next/C API/2020-05-14-00-36-19.bpo-39465.3a5g-X.rst M Doc/whatsnew/3.9.rst M Include/cpython/object.h M Include/cpython/unicodeobject.h M Objects/unicodeobject.c diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index c57d702dce867..2fec790fe3a63 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -964,3 +964,6 @@ Removed * ``PyTuple_ClearFreeList()`` * ``PyUnicode_ClearFreeList()``: the Unicode free list has been removed in Python 3.3. + +* Remove ``_PyUnicode_ClearStaticStrings()`` function. + (Contributed by Victor Stinner in :issue:`39465`.) diff --git a/Include/cpython/object.h b/Include/cpython/object.h index 8bf05a3271183..444f832f5bd8d 100644 --- a/Include/cpython/object.h +++ b/Include/cpython/object.h @@ -36,7 +36,7 @@ PyAPI_FUNC(Py_ssize_t) _Py_GetRefTotal(void); PyId_foo is a static variable, either on block level or file level. On first usage, the string "foo" is interned, and the structures are linked. On interpreter - shutdown, all strings are released (through _PyUnicode_ClearStaticStrings). + shutdown, all strings are released. Alternatively, _Py_static_string allows choosing the variable name. _PyUnicode_FromId returns a borrowed reference to the interned string. 
diff --git a/Include/cpython/unicodeobject.h b/Include/cpython/unicodeobject.h index 94326876292b6..4fd674ffea36e 100644 --- a/Include/cpython/unicodeobject.h +++ b/Include/cpython/unicodeobject.h @@ -1215,8 +1215,6 @@ Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE*) PyUnicode_AsUnicodeCopy( /* Return an interned Unicode object for an Identifier; may fail if there is no memory.*/ PyAPI_FUNC(PyObject*) _PyUnicode_FromId(_Py_Identifier*); -/* Clear all static strings. */ -PyAPI_FUNC(void) _PyUnicode_ClearStaticStrings(void); /* Fast equality check when the inputs are known to be exact unicode types and where the hash values are equal (i.e. a very probable match) */ diff --git a/Misc/NEWS.d/next/C API/2020-05-14-00-36-19.bpo-39465.3a5g-X.rst b/Misc/NEWS.d/next/C API/2020-05-14-00-36-19.bpo-39465.3a5g-X.rst new file mode 100644 index 0000000000000..a08c3da566045 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2020-05-14-00-36-19.bpo-39465.3a5g-X.rst @@ -0,0 +1 @@ +Remove the ``_PyUnicode_ClearStaticStrings()`` function from the C API. 
diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 826298c23a924..34b747ec7bb7e 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -2289,8 +2289,8 @@ _PyUnicode_FromId(_Py_Identifier *id) return id->object; } -void -_PyUnicode_ClearStaticStrings() +static void +unicode_clear_static_strings(void) { _Py_Identifier *tmp, *s = static_strings; while (s) { @@ -16196,7 +16196,7 @@ _PyUnicode_Fini(PyThreadState *tstate) Py_CLEAR(unicode_latin1[i]); } #endif - _PyUnicode_ClearStaticStrings(); + unicode_clear_static_strings(); } _PyUnicode_FiniEncodings(tstate); From webhook-mailer at python.org Wed May 13 19:18:32 2020 From: webhook-mailer at python.org (Chris Jerdonek) Date: Wed, 13 May 2020 23:18:32 -0000 Subject: [Python-checkins] bpo-29587: Make gen.throw() chain exceptions with yield from (GH-19858) Message-ID: https://github.com/python/cpython/commit/75cd8e48c62c97fdb9d9a94fd2335be06084471d commit: 75cd8e48c62c97fdb9d9a94fd2335be06084471d branch: master author: Chris Jerdonek committer: GitHub date: 2020-05-13T16:18:27-07:00 summary: bpo-29587: Make gen.throw() chain exceptions with yield from (GH-19858) The previous commits on bpo-29587 got exception chaining working with gen.throw() in the `yield` case. This patch also gets the `yield from` case working. As a consequence, implicit exception chaining now also works in the asyncio scenario of awaiting on a task when an exception is already active. Tests are included for both the asyncio case and the pure generator-only case. 
files: M Lib/test/test_asyncio/test_tasks.py M Lib/test/test_generators.py M Objects/genobject.c diff --git a/Lib/test/test_asyncio/test_tasks.py b/Lib/test/test_asyncio/test_tasks.py index 68f3b8cce9f65..6eb6b46ec8af7 100644 --- a/Lib/test/test_asyncio/test_tasks.py +++ b/Lib/test/test_asyncio/test_tasks.py @@ -466,6 +466,33 @@ async def inner2(): t = outer() self.assertEqual(self.loop.run_until_complete(t), 1042) + def test_exception_chaining_after_await(self): + # Test that when awaiting on a task when an exception is already + # active, if the task raises an exception it will be chained + # with the original. + loop = asyncio.new_event_loop() + self.set_event_loop(loop) + + async def raise_error(): + raise ValueError + + async def run(): + try: + raise KeyError(3) + except Exception as exc: + task = self.new_task(loop, raise_error()) + try: + await task + except Exception as exc: + self.assertEqual(type(exc), ValueError) + chained = exc.__context__ + self.assertEqual((type(chained), chained.args), + (KeyError, (3,))) + + task = self.new_task(loop, run()) + loop.run_until_complete(task) + loop.close() + def test_cancel(self): def gen(): diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py index e047801199680..1081107ee64ac 100644 --- a/Lib/test/test_generators.py +++ b/Lib/test/test_generators.py @@ -318,7 +318,7 @@ def g(): class GeneratorThrowTest(unittest.TestCase): - def test_exception_context_set(self): + def test_exception_context_with_yield(self): def f(): try: raise KeyError('a') @@ -332,6 +332,23 @@ def f(): context = cm.exception.__context__ self.assertEqual((type(context), context.args), (KeyError, ('a',))) + def test_exception_context_with_yield_from(self): + def f(): + yield + + def g(): + try: + raise KeyError('a') + except Exception: + yield from f() + + gen = g() + gen.send(None) + with self.assertRaises(ValueError) as cm: + gen.throw(ValueError) + context = cm.exception.__context__ + self.assertEqual((type(context), 
context.args), (KeyError, ('a',))) + def test_throw_after_none_exc_type(self): def g(): try: diff --git a/Objects/genobject.c b/Objects/genobject.c index 5b253edfdcd0f..fb01e581f8ae1 100644 --- a/Objects/genobject.c +++ b/Objects/genobject.c @@ -217,6 +217,18 @@ gen_send_ex(PyGenObject *gen, PyObject *arg, int exc, int closing) assert(f->f_back == NULL); f->f_back = tstate->frame; + _PyErr_StackItem *gi_exc_state = &gen->gi_exc_state; + if (exc && gi_exc_state->exc_type != NULL && + gi_exc_state->exc_type != Py_None) + { + Py_INCREF(gi_exc_state->exc_type); + Py_XINCREF(gi_exc_state->exc_value); + Py_XINCREF(gi_exc_state->exc_traceback); + _PyErr_ChainExceptions(gi_exc_state->exc_type, + gi_exc_state->exc_value, + gi_exc_state->exc_traceback); + } + gen->gi_running = 1; gen->gi_exc_state.previous_item = tstate->exc_info; tstate->exc_info = &gen->gi_exc_state; @@ -512,16 +524,6 @@ _gen_throw(PyGenObject *gen, int close_on_genexit, } PyErr_Restore(typ, val, tb); - - _PyErr_StackItem *gi_exc_state = &gen->gi_exc_state; - if (gi_exc_state->exc_type != NULL && gi_exc_state->exc_type != Py_None) { - Py_INCREF(gi_exc_state->exc_type); - Py_XINCREF(gi_exc_state->exc_value); - Py_XINCREF(gi_exc_state->exc_traceback); - _PyErr_ChainExceptions(gi_exc_state->exc_type, - gi_exc_state->exc_value, - gi_exc_state->exc_traceback); - } return gen_send_ex(gen, Py_None, 1, 0); failed_throw: From webhook-mailer at python.org Wed May 13 19:48:42 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 13 May 2020 23:48:42 -0000 Subject: [Python-checkins] bpo-40521: Add PyInterpreterState.unicode (GH-20081) Message-ID: https://github.com/python/cpython/commit/3d17c045b4c3d09b72bbd95ed78af1ae6f0d98d2 commit: 3d17c045b4c3d09b72bbd95ed78af1ae6f0d98d2 branch: master author: Victor Stinner committer: GitHub date: 2020-05-14T01:48:38+02:00 summary: bpo-40521: Add PyInterpreterState.unicode (GH-20081) Move PyInterpreterState.fs_codec into a new PyInterpreterState.unicode structure. 
Give a name to the fs_codec structure and use this structure in unicodeobject.c. files: M Include/internal/pycore_interp.h M Modules/_io/textio.c M Objects/unicodeobject.c diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index 26e7a473a12dc..f04ea330d0457 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -51,6 +51,19 @@ struct _ceval_state { #endif }; +/* fs_codec.encoding is initialized to NULL. + Later, it is set to a non-NULL string by _PyUnicode_InitEncodings(). */ +struct _Py_unicode_fs_codec { + char *encoding; // Filesystem encoding (encoded to UTF-8) + int utf8; // encoding=="utf-8"? + char *errors; // Filesystem errors (encoded to UTF-8) + _Py_error_handler error_handler; +}; + +struct _Py_unicode_state { + struct _Py_unicode_fs_codec fs_codec; +}; + /* interpreter state */ @@ -97,14 +110,7 @@ struct _is { PyObject *codec_error_registry; int codecs_initialized; - /* fs_codec.encoding is initialized to NULL. - Later, it is set to a non-NULL string by _PyUnicode_InitEncodings(). */ - struct { - char *encoding; /* Filesystem encoding (encoded to UTF-8) */ - int utf8; /* encoding=="utf-8"? */ - char *errors; /* Filesystem errors (encoded to UTF-8) */ - _Py_error_handler error_handler; - } fs_codec; + struct _Py_unicode_state unicode; PyConfig config; #ifdef HAVE_DLOPEN diff --git a/Modules/_io/textio.c b/Modules/_io/textio.c index 1abc9ca6f206a..f2c72ebd51658 100644 --- a/Modules/_io/textio.c +++ b/Modules/_io/textio.c @@ -1007,7 +1007,7 @@ io_check_errors(PyObject *errors) /* Avoid calling PyCodec_LookupError() before the codec registry is ready: before_PyUnicode_InitEncodings() is called. 
*/ - if (!interp->fs_codec.encoding) { + if (!interp->unicode.fs_codec.encoding) { return 0; } diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 34b747ec7bb7e..ea46a44bf5faa 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -463,7 +463,7 @@ unicode_check_encoding_errors(const char *encoding, const char *errors) /* Avoid calling _PyCodec_Lookup() and PyCodec_LookupError() before the codec registry is ready: before_PyUnicode_InitEncodings() is called. */ - if (!interp->fs_codec.encoding) { + if (!interp->unicode.fs_codec.encoding) { return 0; } @@ -3650,16 +3650,17 @@ PyObject * PyUnicode_EncodeFSDefault(PyObject *unicode) { PyInterpreterState *interp = _PyInterpreterState_GET(); - if (interp->fs_codec.utf8) { + struct _Py_unicode_fs_codec *fs_codec = &interp->unicode.fs_codec; + if (fs_codec->utf8) { return unicode_encode_utf8(unicode, - interp->fs_codec.error_handler, - interp->fs_codec.errors); + fs_codec->error_handler, + fs_codec->errors); } #ifndef _Py_FORCE_UTF8_FS_ENCODING - else if (interp->fs_codec.encoding) { + else if (fs_codec->encoding) { return PyUnicode_AsEncodedString(unicode, - interp->fs_codec.encoding, - interp->fs_codec.errors); + fs_codec->encoding, + fs_codec->errors); } #endif else { @@ -3886,17 +3887,18 @@ PyObject* PyUnicode_DecodeFSDefaultAndSize(const char *s, Py_ssize_t size) { PyInterpreterState *interp = _PyInterpreterState_GET(); - if (interp->fs_codec.utf8) { + struct _Py_unicode_fs_codec *fs_codec = &interp->unicode.fs_codec; + if (fs_codec->utf8) { return unicode_decode_utf8(s, size, - interp->fs_codec.error_handler, - interp->fs_codec.errors, + fs_codec->error_handler, + fs_codec->errors, NULL); } #ifndef _Py_FORCE_UTF8_FS_ENCODING - else if (interp->fs_codec.encoding) { + else if (fs_codec->encoding) { return PyUnicode_Decode(s, size, - interp->fs_codec.encoding, - interp->fs_codec.errors); + fs_codec->encoding, + fs_codec->errors); } #endif else { @@ -16071,16 +16073,17 @@ 
init_fs_codec(PyInterpreterState *interp) return -1; } - PyMem_RawFree(interp->fs_codec.encoding); - interp->fs_codec.encoding = encoding; + struct _Py_unicode_fs_codec *fs_codec = &interp->unicode.fs_codec; + PyMem_RawFree(fs_codec->encoding); + fs_codec->encoding = encoding; /* encoding has been normalized by init_fs_encoding() */ - interp->fs_codec.utf8 = (strcmp(encoding, "utf-8") == 0); - PyMem_RawFree(interp->fs_codec.errors); - interp->fs_codec.errors = errors; - interp->fs_codec.error_handler = error_handler; + fs_codec->utf8 = (strcmp(encoding, "utf-8") == 0); + PyMem_RawFree(fs_codec->errors); + fs_codec->errors = errors; + fs_codec->error_handler = error_handler; #ifdef _Py_FORCE_UTF8_FS_ENCODING - assert(interp->fs_codec.utf8 == 1); + assert(fs_codec->utf8 == 1); #endif /* At this point, PyUnicode_EncodeFSDefault() and @@ -16089,8 +16092,8 @@ init_fs_codec(PyInterpreterState *interp) /* Set Py_FileSystemDefaultEncoding and Py_FileSystemDefaultEncodeErrors global configuration variables. 
*/ - if (_Py_SetFileSystemEncoding(interp->fs_codec.encoding, - interp->fs_codec.errors) < 0) { + if (_Py_SetFileSystemEncoding(fs_codec->encoding, + fs_codec->errors) < 0) { PyErr_NoMemory(); return -1; } @@ -16133,15 +16136,14 @@ _PyUnicode_InitEncodings(PyThreadState *tstate) static void -_PyUnicode_FiniEncodings(PyThreadState *tstate) +_PyUnicode_FiniEncodings(struct _Py_unicode_fs_codec *fs_codec) { - PyInterpreterState *interp = tstate->interp; - PyMem_RawFree(interp->fs_codec.encoding); - interp->fs_codec.encoding = NULL; - interp->fs_codec.utf8 = 0; - PyMem_RawFree(interp->fs_codec.errors); - interp->fs_codec.errors = NULL; - interp->fs_codec.error_handler = _Py_ERROR_UNKNOWN; + PyMem_RawFree(fs_codec->encoding); + fs_codec->encoding = NULL; + fs_codec->utf8 = 0; + PyMem_RawFree(fs_codec->errors); + fs_codec->errors = NULL; + fs_codec->error_handler = _Py_ERROR_UNKNOWN; } @@ -16199,7 +16201,7 @@ _PyUnicode_Fini(PyThreadState *tstate) unicode_clear_static_strings(); } - _PyUnicode_FiniEncodings(tstate); + _PyUnicode_FiniEncodings(&tstate->interp->unicode.fs_codec); } From webhook-mailer at python.org Wed May 13 20:53:36 2020 From: webhook-mailer at python.org (Arkadiusz Hiler) Date: Thu, 14 May 2020 00:53:36 -0000 Subject: [Python-checkins] bpo-40597: email: Use CTE if lines are longer than max_line_length consistently (gh-20038) Message-ID: https://github.com/python/cpython/commit/6f2f475d5a2cd7675dce844f3af436ba919ef92b commit: 6f2f475d5a2cd7675dce844f3af436ba919ef92b branch: master author: Arkadiusz Hiler committer: GitHub date: 2020-05-13T20:53:26-04:00 summary: bpo-40597: email: Use CTE if lines are longer than max_line_length consistently (gh-20038) raw_data_manager (default for EmailPolicy, EmailMessage) does correct wrapping of 'text' parts as long as the message contains characters outside of 7bit US-ASCII set: base64 or qp Content-Transfer-Encoding is applied if the lines would be too long without it. 
It did not, however, do this for ascii-only text, which could result in lines that were longer than policy.max_line_length or even the rfc 998 maximum. This changeset fixes the heuristic so that if lines are longer than policy.max_line_length, it will always apply a content-transfer-encoding so that the lines are wrapped correctly. files: A Misc/NEWS.d/next/Library/2020-05-11-19-17-23.bpo-40597.4SGfgm.rst M Lib/email/contentmanager.py M Lib/test/test_email/test_contentmanager.py diff --git a/Lib/email/contentmanager.py b/Lib/email/contentmanager.py index b904ded94c92e..2b4b8757f46f6 100644 --- a/Lib/email/contentmanager.py +++ b/Lib/email/contentmanager.py @@ -146,13 +146,13 @@ def embedded_body(lines): return linesep.join(lines) + linesep def normal_body(lines): return b'\n'.join(lines) + b'\n' if cte==None: # Use heuristics to decide on the "best" encoding. - try: - return '7bit', normal_body(lines).decode('ascii') - except UnicodeDecodeError: - pass - if (policy.cte_type == '8bit' and - max(len(x) for x in lines) <= policy.max_line_length): - return '8bit', normal_body(lines).decode('ascii', 'surrogateescape') + if max(len(x) for x in lines) <= policy.max_line_length: + try: + return '7bit', normal_body(lines).decode('ascii') + except UnicodeDecodeError: + pass + if policy.cte_type == '8bit': + return '8bit', normal_body(lines).decode('ascii', 'surrogateescape') sniff = embedded_body(lines[:10]) sniff_qp = quoprimime.body_encode(sniff.decode('latin-1'), policy.max_line_length) diff --git a/Lib/test/test_email/test_contentmanager.py b/Lib/test/test_email/test_contentmanager.py index 169058eac83da..64dca2d017e62 100644 --- a/Lib/test/test_email/test_contentmanager.py +++ b/Lib/test/test_email/test_contentmanager.py @@ -329,6 +329,21 @@ def test_set_text_charset_latin_1(self): self.assertEqual(m.get_payload(decode=True).decode('utf-8'), content) self.assertEqual(m.get_content(), content) + def test_set_text_plain_long_line_heuristics(self): + m = 
self._make_message() + content = ("Simple but long message that is over 78 characters" + " long to force transfer encoding.\n") + raw_data_manager.set_content(m, content) + self.assertEqual(str(m), textwrap.dedent("""\ + Content-Type: text/plain; charset="utf-8" + Content-Transfer-Encoding: quoted-printable + + Simple but long message that is over 78 characters long to = + force transfer encoding. + """)) + self.assertEqual(m.get_payload(decode=True).decode('utf-8'), content) + self.assertEqual(m.get_content(), content) + def test_set_text_short_line_minimal_non_ascii_heuristics(self): + m = self._make_message() + content = "et là il est monté sur moi et il commence à m'éto.\n" diff --git a/Misc/NEWS.d/next/Library/2020-05-11-19-17-23.bpo-40597.4SGfgm.rst b/Misc/NEWS.d/next/Library/2020-05-11-19-17-23.bpo-40597.4SGfgm.rst new file mode 100644 index 0000000000000..1b9fe609c25b7 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-11-19-17-23.bpo-40597.4SGfgm.rst @@ -0,0 +1 @@ +If text content lines are longer than policy.max_line_length, always use a content-encoding to make sure they are wrapped. From webhook-mailer at python.org Wed May 13 21:22:38 2020 From: webhook-mailer at python.org (Hai Shi) Date: Thu, 14 May 2020 01:22:38 -0000 Subject: [Python-checkins] bpo-40275: Import locale module lazily in gettext (GH-19905) Message-ID: https://github.com/python/cpython/commit/7443d42021d433da0497f8ba651daa47e7dc1991 commit: 7443d42021d433da0497f8ba651daa47e7dc1991 branch: master author: Hai Shi committer: GitHub date: 2020-05-14T03:22:30+02:00 summary: bpo-40275: Import locale module lazily in gettext (GH-19905) files: M Lib/gettext.py diff --git a/Lib/gettext.py b/Lib/gettext.py index b98f501884b75..77b67aef4204c 100644 --- a/Lib/gettext.py +++ b/Lib/gettext.py @@ -46,7 +46,6 @@ # find this format documented anywhere.
-import locale import os import re import sys @@ -210,6 +209,7 @@ def func(n): def _expand_lang(loc): + import locale loc = locale.normalize(loc) COMPONENT_CODESET = 1 << 0 COMPONENT_TERRITORY = 1 << 1 @@ -278,6 +278,7 @@ def lgettext(self, message): import warnings warnings.warn('lgettext() is deprecated, use gettext() instead', DeprecationWarning, 2) + import locale if self._fallback: with warnings.catch_warnings(): warnings.filterwarnings('ignore', r'.*\blgettext\b.*', @@ -299,6 +300,7 @@ def lngettext(self, msgid1, msgid2, n): import warnings warnings.warn('lngettext() is deprecated, use ngettext() instead', DeprecationWarning, 2) + import locale if self._fallback: with warnings.catch_warnings(): warnings.filterwarnings('ignore', r'.*\blngettext\b.*', @@ -462,6 +464,7 @@ def lgettext(self, message): import warnings warnings.warn('lgettext() is deprecated, use gettext() instead', DeprecationWarning, 2) + import locale missing = object() tmsg = self._catalog.get(message, missing) if tmsg is missing: @@ -476,6 +479,7 @@ def lngettext(self, msgid1, msgid2, n): import warnings warnings.warn('lngettext() is deprecated, use ngettext() instead', DeprecationWarning, 2) + import locale try: tmsg = self._catalog[(msgid1, self.plural(n))] except KeyError: @@ -668,6 +672,7 @@ def ldgettext(domain, message): import warnings warnings.warn('ldgettext() is deprecated, use dgettext() instead', DeprecationWarning, 2) + import locale codeset = _localecodesets.get(domain) try: with warnings.catch_warnings(): @@ -695,6 +700,7 @@ def ldngettext(domain, msgid1, msgid2, n): import warnings warnings.warn('ldngettext() is deprecated, use dngettext() instead', DeprecationWarning, 2) + import locale codeset = _localecodesets.get(domain) try: with warnings.catch_warnings(): From webhook-mailer at python.org Thu May 14 12:06:07 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 14 May 2020 16:06:07 -0000 Subject: [Python-checkins] bpo-40549: posixmodule.c uses defining_class 
(GH-20075) Message-ID: https://github.com/python/cpython/commit/97f33c35445e6d67df24dcbafef7b78333feb778 commit: 97f33c35445e6d67df24dcbafef7b78333feb778 branch: master author: Victor Stinner committer: GitHub date: 2020-05-14T18:05:58+02:00 summary: bpo-40549: posixmodule.c uses defining_class (GH-20075) Pass PEP 573 defining_class to os.DirEntry methods. The module state is now retrieve from defining_class rather than Py_TYPE(self), to support subclasses (even if DirEntry doesn't support subclasses yet). * Pass the module rather than defining_class to DirEntry_fetch_stat(). * Only get the module state once in _posix_clear(), _posix_traverse() and _posixmodule_exec(). files: M Modules/clinic/posixmodule.c.h M Modules/posixmodule.c diff --git a/Modules/clinic/posixmodule.c.h b/Modules/clinic/posixmodule.c.h index cf6d7449bac83..41baa45573979 100644 --- a/Modules/clinic/posixmodule.c.h +++ b/Modules/clinic/posixmodule.c.h @@ -8388,18 +8388,24 @@ PyDoc_STRVAR(os_DirEntry_is_symlink__doc__, "Return True if the entry is a symbolic link; cached per entry."); #define OS_DIRENTRY_IS_SYMLINK_METHODDEF \ - {"is_symlink", (PyCFunction)os_DirEntry_is_symlink, METH_NOARGS, os_DirEntry_is_symlink__doc__}, + {"is_symlink", (PyCFunction)(void(*)(void))os_DirEntry_is_symlink, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, os_DirEntry_is_symlink__doc__}, static int -os_DirEntry_is_symlink_impl(DirEntry *self); +os_DirEntry_is_symlink_impl(DirEntry *self, PyTypeObject *defining_class); static PyObject * -os_DirEntry_is_symlink(DirEntry *self, PyObject *Py_UNUSED(ignored)) +os_DirEntry_is_symlink(DirEntry *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; + static const char * const _keywords[] = { NULL}; + static _PyArg_Parser _parser = {":is_symlink", _keywords, 0}; int _return_value; - _return_value = os_DirEntry_is_symlink_impl(self); + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser + )) { 
+ goto exit; + } + _return_value = os_DirEntry_is_symlink_impl(self, defining_class); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -8416,34 +8422,25 @@ PyDoc_STRVAR(os_DirEntry_stat__doc__, "Return stat_result object for the entry; cached per entry."); #define OS_DIRENTRY_STAT_METHODDEF \ - {"stat", (PyCFunction)(void(*)(void))os_DirEntry_stat, METH_FASTCALL|METH_KEYWORDS, os_DirEntry_stat__doc__}, + {"stat", (PyCFunction)(void(*)(void))os_DirEntry_stat, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, os_DirEntry_stat__doc__}, static PyObject * -os_DirEntry_stat_impl(DirEntry *self, int follow_symlinks); +os_DirEntry_stat_impl(DirEntry *self, PyTypeObject *defining_class, + int follow_symlinks); static PyObject * -os_DirEntry_stat(DirEntry *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +os_DirEntry_stat(DirEntry *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; static const char * const _keywords[] = {"follow_symlinks", NULL}; - static _PyArg_Parser _parser = {NULL, _keywords, "stat", 0}; - PyObject *argsbuf[1]; - Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 0; + static _PyArg_Parser _parser = {"|$p:stat", _keywords, 0}; int follow_symlinks = 1; - args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 0, 0, argsbuf); - if (!args) { - goto exit; - } - if (!noptargs) { - goto skip_optional_kwonly; - } - follow_symlinks = PyObject_IsTrue(args[0]); - if (follow_symlinks < 0) { + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, + &follow_symlinks)) { goto exit; } -skip_optional_kwonly: - return_value = os_DirEntry_stat_impl(self, follow_symlinks); + return_value = os_DirEntry_stat_impl(self, defining_class, follow_symlinks); exit: return return_value; @@ -8456,35 +8453,26 @@ PyDoc_STRVAR(os_DirEntry_is_dir__doc__, "Return True if the entry is a directory; cached per entry."); #define OS_DIRENTRY_IS_DIR_METHODDEF \ - {"is_dir", (PyCFunction)(void(*)(void))os_DirEntry_is_dir, METH_FASTCALL|METH_KEYWORDS, os_DirEntry_is_dir__doc__}, + {"is_dir", (PyCFunction)(void(*)(void))os_DirEntry_is_dir, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, os_DirEntry_is_dir__doc__}, static int -os_DirEntry_is_dir_impl(DirEntry *self, int follow_symlinks); +os_DirEntry_is_dir_impl(DirEntry *self, PyTypeObject *defining_class, + int follow_symlinks); static PyObject * -os_DirEntry_is_dir(DirEntry *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +os_DirEntry_is_dir(DirEntry *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; static const char * const _keywords[] = {"follow_symlinks", NULL}; - static _PyArg_Parser _parser = {NULL, _keywords, "is_dir", 0}; - PyObject *argsbuf[1]; - Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 0; + static _PyArg_Parser _parser = {"|$p:is_dir", _keywords, 0}; int follow_symlinks = 1; int _return_value; - args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 0, 0, argsbuf); - if (!args) { - goto exit; - } - if (!noptargs) { - goto skip_optional_kwonly; - } - follow_symlinks = PyObject_IsTrue(args[0]); - if (follow_symlinks < 0) { + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, + &follow_symlinks)) { goto exit; } -skip_optional_kwonly: - _return_value = os_DirEntry_is_dir_impl(self, follow_symlinks); + _return_value = os_DirEntry_is_dir_impl(self, defining_class, follow_symlinks); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -8501,35 +8489,26 @@ PyDoc_STRVAR(os_DirEntry_is_file__doc__, "Return True if the entry is a file; cached per entry."); #define OS_DIRENTRY_IS_FILE_METHODDEF \ - {"is_file", (PyCFunction)(void(*)(void))os_DirEntry_is_file, METH_FASTCALL|METH_KEYWORDS, os_DirEntry_is_file__doc__}, + {"is_file", (PyCFunction)(void(*)(void))os_DirEntry_is_file, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, os_DirEntry_is_file__doc__}, static int -os_DirEntry_is_file_impl(DirEntry *self, int follow_symlinks); +os_DirEntry_is_file_impl(DirEntry *self, PyTypeObject *defining_class, + int follow_symlinks); static PyObject * -os_DirEntry_is_file(DirEntry *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +os_DirEntry_is_file(DirEntry *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; static const char * const _keywords[] = {"follow_symlinks", NULL}; - static _PyArg_Parser _parser = {NULL, _keywords, "is_file", 0}; - PyObject *argsbuf[1]; - Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 0; + static _PyArg_Parser _parser = {"|$p:is_file", _keywords, 0}; int follow_symlinks = 1; int _return_value; - args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 0, 0, argsbuf); - if (!args) { - goto exit; - } - if (!noptargs) { - goto skip_optional_kwonly; - } - follow_symlinks = PyObject_IsTrue(args[0]); - if (follow_symlinks < 0) { + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, + &follow_symlinks)) { goto exit; } -skip_optional_kwonly: - _return_value = os_DirEntry_is_file_impl(self, follow_symlinks); + _return_value = os_DirEntry_is_file_impl(self, defining_class, follow_symlinks); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -9417,4 +9396,4 @@ os_waitstatus_to_exitcode(PyObject *module, PyObject *const *args, Py_ssize_t na #ifndef OS_WAITSTATUS_TO_EXITCODE_METHODDEF #define OS_WAITSTATUS_TO_EXITCODE_METHODDEF #endif /* !defined(OS_WAITSTATUS_TO_EXITCODE_METHODDEF) */ -/*[clinic end generated code: output=be90d3aba972098b input=a9049054013a1b77]*/ +/*[clinic end generated code: output=005919eaaef3f8e6 input=a9049054013a1b77]*/ diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index 60a60e9aed76b..2ddf30de89a68 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -2101,48 +2101,50 @@ statresult_new(PyTypeObject *type, PyObject *args, PyObject *kwds) static int _posix_clear(PyObject *module) { - Py_CLEAR(get_posix_state(module)->billion); - Py_CLEAR(get_posix_state(module)->DirEntryType); - Py_CLEAR(get_posix_state(module)->ScandirIteratorType); + _posixstate *state = get_posix_state(module); + Py_CLEAR(state->billion); + Py_CLEAR(state->DirEntryType); + Py_CLEAR(state->ScandirIteratorType); #if defined(HAVE_SCHED_SETPARAM) || defined(HAVE_SCHED_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDPARAM) - Py_CLEAR(get_posix_state(module)->SchedParamType); + Py_CLEAR(state->SchedParamType); #endif - 
Py_CLEAR(get_posix_state(module)->StatResultType); - Py_CLEAR(get_posix_state(module)->StatVFSResultType); - Py_CLEAR(get_posix_state(module)->TerminalSizeType); - Py_CLEAR(get_posix_state(module)->TimesResultType); - Py_CLEAR(get_posix_state(module)->UnameResultType); + Py_CLEAR(state->StatResultType); + Py_CLEAR(state->StatVFSResultType); + Py_CLEAR(state->TerminalSizeType); + Py_CLEAR(state->TimesResultType); + Py_CLEAR(state->UnameResultType); #if defined(HAVE_WAITID) && !defined(__APPLE__) - Py_CLEAR(get_posix_state(module)->WaitidResultType); + Py_CLEAR(state->WaitidResultType); #endif #if defined(HAVE_WAIT3) || defined(HAVE_WAIT4) - Py_CLEAR(get_posix_state(module)->struct_rusage); + Py_CLEAR(state->struct_rusage); #endif - Py_CLEAR(get_posix_state(module)->st_mode); + Py_CLEAR(state->st_mode); return 0; } static int _posix_traverse(PyObject *module, visitproc visit, void *arg) { - Py_VISIT(get_posix_state(module)->billion); - Py_VISIT(get_posix_state(module)->DirEntryType); - Py_VISIT(get_posix_state(module)->ScandirIteratorType); + _posixstate *state = get_posix_state(module); + Py_VISIT(state->billion); + Py_VISIT(state->DirEntryType); + Py_VISIT(state->ScandirIteratorType); #if defined(HAVE_SCHED_SETPARAM) || defined(HAVE_SCHED_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDPARAM) - Py_VISIT(get_posix_state(module)->SchedParamType); + Py_VISIT(state->SchedParamType); #endif - Py_VISIT(get_posix_state(module)->StatResultType); - Py_VISIT(get_posix_state(module)->StatVFSResultType); - Py_VISIT(get_posix_state(module)->TerminalSizeType); - Py_VISIT(get_posix_state(module)->TimesResultType); - Py_VISIT(get_posix_state(module)->UnameResultType); + Py_VISIT(state->StatResultType); + Py_VISIT(state->StatVFSResultType); + Py_VISIT(state->TerminalSizeType); + Py_VISIT(state->TimesResultType); + Py_VISIT(state->UnameResultType); #if defined(HAVE_WAITID) && !defined(__APPLE__) - Py_VISIT(get_posix_state(module)->WaitidResultType); 
+ Py_VISIT(state->WaitidResultType); #endif #if defined(HAVE_WAIT3) || defined(HAVE_WAIT4) - Py_VISIT(get_posix_state(module)->struct_rusage); + Py_VISIT(state->struct_rusage); #endif - Py_VISIT(get_posix_state(module)->st_mode); + Py_VISIT(state->st_mode); return 0; } @@ -12747,17 +12749,20 @@ DirEntry_dealloc(DirEntry *entry) /* Forward reference */ static int -DirEntry_test_mode(DirEntry *self, int follow_symlinks, unsigned short mode_bits); +DirEntry_test_mode(PyTypeObject *defining_class, DirEntry *self, + int follow_symlinks, unsigned short mode_bits); /*[clinic input] os.DirEntry.is_symlink -> bool + defining_class: defining_class + / Return True if the entry is a symbolic link; cached per entry. [clinic start generated code]*/ static int -os_DirEntry_is_symlink_impl(DirEntry *self) -/*[clinic end generated code: output=42244667d7bcfc25 input=1605a1b4b96976c3]*/ +os_DirEntry_is_symlink_impl(DirEntry *self, PyTypeObject *defining_class) +/*[clinic end generated code: output=293096d589b6d47c input=e9acc5ee4d511113]*/ { #ifdef MS_WINDOWS return (self->win32_lstat.st_mode & S_IFMT) == S_IFLNK; @@ -12766,21 +12771,15 @@ os_DirEntry_is_symlink_impl(DirEntry *self) if (self->d_type != DT_UNKNOWN) return self->d_type == DT_LNK; else - return DirEntry_test_mode(self, 0, S_IFLNK); + return DirEntry_test_mode(defining_class, self, 0, S_IFLNK); #else /* POSIX without d_type */ - return DirEntry_test_mode(self, 0, S_IFLNK); + return DirEntry_test_mode(defining_class, self, 0, S_IFLNK); #endif } -static inline PyObject* -DirEntry_get_module(DirEntry *self) -{ - return PyType_GetModule(Py_TYPE(self)); -} - static PyObject * -DirEntry_fetch_stat(DirEntry *self, int follow_symlinks) +DirEntry_fetch_stat(PyObject *module, DirEntry *self, int follow_symlinks) { int result; STRUCT_STAT st; @@ -12816,18 +12815,18 @@ DirEntry_fetch_stat(DirEntry *self, int follow_symlinks) if (result != 0) return path_object_error(self->path); - return 
_pystat_fromstructstat(DirEntry_get_module(self), &st); + return _pystat_fromstructstat(module, &st); } static PyObject * -DirEntry_get_lstat(DirEntry *self) +DirEntry_get_lstat(PyTypeObject *defining_class, DirEntry *self) { if (!self->lstat) { + PyObject *module = PyType_GetModule(defining_class); #ifdef MS_WINDOWS - self->lstat = _pystat_fromstructstat(DirEntry_get_module(self), - &self->win32_lstat); + self->lstat = _pystat_fromstructstat(module, &self->win32_lstat); #else /* POSIX */ - self->lstat = DirEntry_fetch_stat(self, 0); + self->lstat = DirEntry_fetch_stat(module, self, 0); #endif } Py_XINCREF(self->lstat); @@ -12836,6 +12835,8 @@ DirEntry_get_lstat(DirEntry *self) /*[clinic input] os.DirEntry.stat + defining_class: defining_class + / * follow_symlinks: bool = True @@ -12843,20 +12844,26 @@ Return stat_result object for the entry; cached per entry. [clinic start generated code]*/ static PyObject * -os_DirEntry_stat_impl(DirEntry *self, int follow_symlinks) -/*[clinic end generated code: output=008593b3a6d01305 input=280d14c1d6f1d00d]*/ +os_DirEntry_stat_impl(DirEntry *self, PyTypeObject *defining_class, + int follow_symlinks) +/*[clinic end generated code: output=23f803e19c3e780e input=e816273c4e67ee98]*/ { - if (!follow_symlinks) - return DirEntry_get_lstat(self); + if (!follow_symlinks) { + return DirEntry_get_lstat(defining_class, self); + } if (!self->stat) { - int result = os_DirEntry_is_symlink_impl(self); - if (result == -1) + int result = os_DirEntry_is_symlink_impl(self, defining_class); + if (result == -1) { return NULL; - else if (result) - self->stat = DirEntry_fetch_stat(self, 1); - else - self->stat = DirEntry_get_lstat(self); + } + if (result) { + PyObject *module = PyType_GetModule(defining_class); + self->stat = DirEntry_fetch_stat(module, self, 1); + } + else { + self->stat = DirEntry_get_lstat(defining_class, self); + } } Py_XINCREF(self->stat); @@ -12865,7 +12872,8 @@ os_DirEntry_stat_impl(DirEntry *self, int follow_symlinks) /* Set 
exception and return -1 on error, 0 for False, 1 for True */ static int -DirEntry_test_mode(DirEntry *self, int follow_symlinks, unsigned short mode_bits) +DirEntry_test_mode(PyTypeObject *defining_class, DirEntry *self, + int follow_symlinks, unsigned short mode_bits) { PyObject *stat = NULL; PyObject *st_mode = NULL; @@ -12890,7 +12898,7 @@ DirEntry_test_mode(DirEntry *self, int follow_symlinks, unsigned short mode_bits #if defined(MS_WINDOWS) || defined(HAVE_DIRENT_D_TYPE) if (need_stat) { #endif - stat = os_DirEntry_stat_impl(self, follow_symlinks); + stat = os_DirEntry_stat_impl(self, defining_class, follow_symlinks); if (!stat) { if (PyErr_ExceptionMatches(PyExc_FileNotFoundError)) { /* If file doesn't exist (anymore), then return False @@ -12900,7 +12908,8 @@ DirEntry_test_mode(DirEntry *self, int follow_symlinks, unsigned short mode_bits } goto error; } - st_mode = PyObject_GetAttr(stat, get_posix_state(DirEntry_get_module(self))->st_mode); + _posixstate* state = get_posix_state(PyType_GetModule(defining_class)); + st_mode = PyObject_GetAttr(stat, state->st_mode); if (!st_mode) goto error; @@ -12943,6 +12952,8 @@ DirEntry_test_mode(DirEntry *self, int follow_symlinks, unsigned short mode_bits /*[clinic input] os.DirEntry.is_dir -> bool + defining_class: defining_class + / * follow_symlinks: bool = True @@ -12950,14 +12961,17 @@ Return True if the entry is a directory; cached per entry. 
[clinic start generated code]*/ static int -os_DirEntry_is_dir_impl(DirEntry *self, int follow_symlinks) -/*[clinic end generated code: output=ad2e8d54365da287 input=0135232766f53f58]*/ +os_DirEntry_is_dir_impl(DirEntry *self, PyTypeObject *defining_class, + int follow_symlinks) +/*[clinic end generated code: output=0cd453b9c0987fdf input=1a4ffd6dec9920cb]*/ { - return DirEntry_test_mode(self, follow_symlinks, S_IFDIR); + return DirEntry_test_mode(defining_class, self, follow_symlinks, S_IFDIR); } /*[clinic input] os.DirEntry.is_file -> bool + defining_class: defining_class + / * follow_symlinks: bool = True @@ -12965,10 +12979,11 @@ Return True if the entry is a file; cached per entry. [clinic start generated code]*/ static int -os_DirEntry_is_file_impl(DirEntry *self, int follow_symlinks) -/*[clinic end generated code: output=8462ade481d8a476 input=0dc90be168b041ee]*/ +os_DirEntry_is_file_impl(DirEntry *self, PyTypeObject *defining_class, + int follow_symlinks) +/*[clinic end generated code: output=f7c277ab5ba80908 input=0a64c5a12e802e3b]*/ { - return DirEntry_test_mode(self, follow_symlinks, S_IFREG); + return DirEntry_test_mode(defining_class, self, follow_symlinks, S_IFREG); } /*[clinic input] @@ -13496,6 +13511,8 @@ static PyType_Spec ScandirIteratorType_spec = { MODNAME ".ScandirIterator", sizeof(ScandirIterator), 0, + // bpo-40549: Py_TPFLAGS_BASETYPE should not be used, since + // PyType_GetModule(Py_TYPE(self)) doesn't work on a subclass instance. 
Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_FINALIZE, ScandirIteratorType_slots }; @@ -14785,12 +14802,10 @@ static const char * const have_functions[] = { static int posixmodule_exec(PyObject *m) { - PyObject *v; - PyObject *list; - const char * const *trace; + _posixstate *state = get_posix_state(m); /* Initialize environ dictionary */ - v = convertenviron(); + PyObject *v = convertenviron(); Py_XINCREF(v); if (v == NULL || PyModule_AddObject(m, "environ", v) != 0) return -1; @@ -14813,7 +14828,7 @@ posixmodule_exec(PyObject *m) } Py_INCREF(WaitidResultType); PyModule_AddObject(m, "waitid_result", WaitidResultType); - get_posix_state(m)->WaitidResultType = WaitidResultType; + state->WaitidResultType = WaitidResultType; #endif stat_result_desc.name = "os.stat_result"; /* see issue #19209 */ @@ -14826,7 +14841,7 @@ posixmodule_exec(PyObject *m) } Py_INCREF(StatResultType); PyModule_AddObject(m, "stat_result", StatResultType); - get_posix_state(m)->StatResultType = StatResultType; + state->StatResultType = StatResultType; structseq_new = ((PyTypeObject *)StatResultType)->tp_new; ((PyTypeObject *)StatResultType)->tp_new = statresult_new; @@ -14837,7 +14852,7 @@ posixmodule_exec(PyObject *m) } Py_INCREF(StatVFSResultType); PyModule_AddObject(m, "statvfs_result", StatVFSResultType); - get_posix_state(m)->StatVFSResultType = StatVFSResultType; + state->StatVFSResultType = StatVFSResultType; #ifdef NEED_TICKS_PER_SECOND # if defined(HAVE_SYSCONF) && defined(_SC_CLK_TCK) ticks_per_second = sysconf(_SC_CLK_TCK); @@ -14856,7 +14871,7 @@ posixmodule_exec(PyObject *m) } Py_INCREF(SchedParamType); PyModule_AddObject(m, "sched_param", SchedParamType); - get_posix_state(m)->SchedParamType = SchedParamType; + state->SchedParamType = SchedParamType; ((PyTypeObject *)SchedParamType)->tp_new = os_sched_param; #endif @@ -14867,14 +14882,14 @@ posixmodule_exec(PyObject *m) } Py_INCREF(TerminalSizeType); PyModule_AddObject(m, "terminal_size", TerminalSizeType); - 
get_posix_state(m)->TerminalSizeType = TerminalSizeType; + state->TerminalSizeType = TerminalSizeType; /* initialize scandir types */ PyObject *ScandirIteratorType = PyType_FromModuleAndSpec(m, &ScandirIteratorType_spec, NULL); if (ScandirIteratorType == NULL) { return -1; } - get_posix_state(m)->ScandirIteratorType = ScandirIteratorType; + state->ScandirIteratorType = ScandirIteratorType; PyObject *DirEntryType = PyType_FromModuleAndSpec(m, &DirEntryType_spec, NULL); if (DirEntryType == NULL) { @@ -14882,7 +14897,7 @@ posixmodule_exec(PyObject *m) } Py_INCREF(DirEntryType); PyModule_AddObject(m, "DirEntry", DirEntryType); - get_posix_state(m)->DirEntryType = DirEntryType; + state->DirEntryType = DirEntryType; times_result_desc.name = MODNAME ".times_result"; PyObject *TimesResultType = (PyObject *)PyStructSequence_NewType(&times_result_desc); @@ -14891,7 +14906,7 @@ posixmodule_exec(PyObject *m) } Py_INCREF(TimesResultType); PyModule_AddObject(m, "times_result", TimesResultType); - get_posix_state(m)->TimesResultType = TimesResultType; + state->TimesResultType = TimesResultType; PyTypeObject *UnameResultType = PyStructSequence_NewType(&uname_result_desc); if (UnameResultType == NULL) { @@ -14899,7 +14914,7 @@ posixmodule_exec(PyObject *m) } Py_INCREF(UnameResultType); PyModule_AddObject(m, "uname_result", (PyObject *)UnameResultType); - get_posix_state(m)->UnameResultType = (PyObject *)UnameResultType; + state->UnameResultType = (PyObject *)UnameResultType; #ifdef __APPLE__ /* @@ -14939,15 +14954,15 @@ posixmodule_exec(PyObject *m) #endif /* __APPLE__ */ - if ((get_posix_state(m)->billion = PyLong_FromLong(1000000000)) == NULL) + if ((state->billion = PyLong_FromLong(1000000000)) == NULL) return -1; #if defined(HAVE_WAIT3) || defined(HAVE_WAIT4) - get_posix_state(m)->struct_rusage = PyUnicode_InternFromString("struct_rusage"); - if (get_posix_state(m)->struct_rusage == NULL) + state->struct_rusage = PyUnicode_InternFromString("struct_rusage"); + if
(state->struct_rusage == NULL) return -1; #endif - get_posix_state(m)->st_mode = PyUnicode_InternFromString("st_mode"); - if (get_posix_state(m)->st_mode == NULL) + state->st_mode = PyUnicode_InternFromString("st_mode"); + if (state->st_mode == NULL) return -1; /* suppress "function not used" warnings */ @@ -14964,10 +14979,11 @@ posixmodule_exec(PyObject *m) * provide list of locally available functions * so os.py can populate support_* lists */ - list = PyList_New(0); - if (!list) + PyObject *list = PyList_New(0); + if (!list) { return -1; - for (trace = have_functions; *trace; trace++) { + } + for (const char * const *trace = have_functions; *trace; trace++) { PyObject *unicode = PyUnicode_DecodeASCII(*trace, strlen(*trace), NULL); if (!unicode) return -1; From webhook-mailer at python.org Thu May 14 12:46:32 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 14 May 2020 16:46:32 -0000 Subject: [Python-checkins] Revert "bpo-32604: [_xxsubinterpreters] Propagate exceptions. (GH-19768)" (GH-20089) Message-ID: https://github.com/python/cpython/commit/f2c3b6823bc4777d4a14eb0c3615b719521f763a commit: f2c3b6823bc4777d4a14eb0c3615b719521f763a branch: master author: Victor Stinner committer: GitHub date: 2020-05-14T18:46:24+02:00 summary: Revert "bpo-32604: [_xxsubinterpreters] Propagate exceptions. (GH-19768)" (GH-20089) * Revert "bpo-40613: Remove compiler warning from _xxsubinterpretersmodule (GH-20069)" This reverts commit fa0a66e62d087765dbc5c1b89d6149a23ecfb0a6. * Revert "bpo-32604: [_xxsubinterpreters] Propagate exceptions. (GH-19768)" This reverts commit a1d9e0accd33af1d8e90fc48b34c13d7b07dcf57. 
files: M Lib/test/test__xxsubinterpreters.py M Modules/_xxsubinterpretersmodule.c diff --git a/Lib/test/test__xxsubinterpreters.py b/Lib/test/test__xxsubinterpreters.py index 039c040ad3950..e17bfde2c2f75 100644 --- a/Lib/test/test__xxsubinterpreters.py +++ b/Lib/test/test__xxsubinterpreters.py @@ -1,4 +1,3 @@ -import builtins from collections import namedtuple import contextlib import itertools @@ -867,11 +866,10 @@ def assert_run_failed(self, exctype, msg=None): yield if msg is None: self.assertEqual(str(caught.exception).split(':')[0], - exctype.__name__) + str(exctype)) else: self.assertEqual(str(caught.exception), - "{}: {}".format(exctype.__name__, msg)) - self.assertIsInstance(caught.exception.__cause__, exctype) + "{}: {}".format(exctype, msg)) def test_invalid_syntax(self): with self.assert_run_failed(SyntaxError): @@ -1062,301 +1060,6 @@ def f(): self.assertEqual(retcode, 0) -def build_exception(exctype, /, *args, **kwargs): - # XXX Use __qualname__? - name = exctype.__name__ - argreprs = [repr(a) for a in args] - if kwargs: - kwargreprs = [f'{k}={v!r}' for k, v in kwargs.items()] - script = f'{name}({", ".join(argreprs)}, {", ".join(kwargreprs)})' - else: - script = f'{name}({", ".join(argreprs)})' - expected = exctype(*args, **kwargs) - return script, expected - - -def build_exceptions(self, *exctypes, default=None, custom=None, bases=True): - if not exctypes: - raise NotImplementedError - if not default: - default = ((), {}) - elif isinstance(default, str): - default = ((default,), {}) - elif type(default) is not tuple: - raise NotImplementedError - elif len(default) != 2: - default = (default, {}) - elif type(default[0]) is not tuple: - default = (default, {}) - elif type(default[1]) is not dict: - default = (default, {}) - # else leave it alone - - for exctype in exctypes: - customtype = None - values = default - if custom: - if exctype in custom: - customtype = exctype - elif bases: - for customtype in custom: - if issubclass(exctype, customtype): - 
break - else: - customtype = None - if customtype is not None: - values = custom[customtype] - if values is None: - continue - args, kwargs = values - script, expected = build_exception(exctype, *args, **kwargs) - yield exctype, customtype, script, expected - - -try: - raise Exception -except Exception as exc: - assert exc.__traceback__ is not None - Traceback = type(exc.__traceback__) - - -class RunFailedTests(TestBase): - - BUILTINS = [v - for v in vars(builtins).values() - if (type(v) is type - and issubclass(v, Exception) - #and issubclass(v, BaseException) - ) - ] - BUILTINS_SPECIAL = [ - # These all have extra attributes (i.e. args/kwargs) - SyntaxError, - ImportError, - UnicodeError, - OSError, - SystemExit, - StopIteration, - ] - - @classmethod - def build_exceptions(cls, exctypes=None, default=(), custom=None): - if exctypes is None: - exctypes = cls.BUILTINS - if custom is None: - # Skip the "special" ones. - custom = {et: None for et in cls.BUILTINS_SPECIAL} - yield from build_exceptions(*exctypes, default=default, custom=custom) - - def assertExceptionsEqual(self, exc, expected, *, chained=True): - if type(expected) is type: - self.assertIs(type(exc), expected) - return - elif not isinstance(exc, Exception): - self.assertEqual(exc, expected) - elif not isinstance(expected, Exception): - self.assertEqual(exc, expected) - else: - # Plain equality doesn't work, so we have to compare manually. 
- self.assertIs(type(exc), type(expected)) - self.assertEqual(exc.args, expected.args) - self.assertEqual(exc.__reduce__(), expected.__reduce__()) - if chained: - self.assertExceptionsEqual(exc.__context__, - expected.__context__) - self.assertExceptionsEqual(exc.__cause__, - expected.__cause__) - self.assertEqual(exc.__suppress_context__, - expected.__suppress_context__) - - def assertTracebacksEqual(self, tb, expected): - if not isinstance(tb, Traceback): - self.assertEqual(tb, expected) - elif not isinstance(expected, Traceback): - self.assertEqual(tb, expected) - else: - self.assertEqual(tb.tb_frame.f_code.co_name, - expected.tb_frame.f_code.co_name) - self.assertEqual(tb.tb_frame.f_code.co_filename, - expected.tb_frame.f_code.co_filename) - self.assertEqual(tb.tb_lineno, expected.tb_lineno) - self.assertTracebacksEqual(tb.tb_next, expected.tb_next) - - # XXX Move this to TestBase? - @contextlib.contextmanager - def expected_run_failure(self, expected): - exctype = expected if type(expected) is type else type(expected) - - with self.assertRaises(interpreters.RunFailedError) as caught: - yield caught - exc = caught.exception - - modname = exctype.__module__ - if modname == 'builtins' or modname == '__main__': - exctypename = exctype.__name__ - else: - exctypename = f'{modname}.{exctype.__name__}' - if exctype is expected: - self.assertEqual(str(exc).split(':')[0], exctypename) - else: - self.assertEqual(str(exc), f'{exctypename}: {expected}') - self.assertExceptionsEqual(exc.__cause__, expected) - if exc.__cause__ is not None: - self.assertIsNotNone(exc.__cause__.__traceback__) - - def test_builtin_exceptions(self): - interpid = interpreters.create() - msg = '' - for i, info in enumerate(self.build_exceptions( - default=msg, - custom={ - SyntaxError: ((msg, '', 1, 3, 'a +?'), {}), - ImportError: ((msg,), {'name': 'spam', 'path': '/x/spam.py'}), - UnicodeError: None, - #UnicodeError: ((), {}), - #OSError: ((), {}), - SystemExit: ((1,), {}), - StopIteration: 
(('',), {}), - }, - )): - exctype, _, script, expected = info - testname = f'{i+1} - {script}' - script = f'raise {script}' - - with self.subTest(testname): - with self.expected_run_failure(expected): - interpreters.run_string(interpid, script) - - def test_custom_exception_from___main__(self): - script = dedent(""" - class SpamError(Exception): - def __init__(self, q): - super().__init__(f'got {q}') - self.q = q - raise SpamError('eggs') - """) - expected = Exception(f'SpamError: got {"eggs"}') - - interpid = interpreters.create() - with self.assertRaises(interpreters.RunFailedError) as caught: - interpreters.run_string(interpid, script) - cause = caught.exception.__cause__ - - self.assertExceptionsEqual(cause, expected) - - class SpamError(Exception): - # The normal Exception.__reduce__() produces a funny result - # here. So we have to use a custom __new__(). - def __new__(cls, q): - if type(q) is SpamError: - return q - return super().__new__(cls, q) - def __init__(self, q): - super().__init__(f'got {q}') - self.q = q - - def test_custom_exception(self): - script = dedent(""" - import test.test__xxsubinterpreters - SpamError = test.test__xxsubinterpreters.RunFailedTests.SpamError - raise SpamError('eggs') - """) - try: - ns = {} - exec(script, ns, ns) - except Exception as exc: - expected = exc - - interpid = interpreters.create() - with self.expected_run_failure(expected): - interpreters.run_string(interpid, script) - - class SpamReducedError(Exception): - def __init__(self, q): - super().__init__(f'got {q}') - self.q = q - def __reduce__(self): - return (type(self), (self.q,), {}) - - def test_custom___reduce__(self): - script = dedent(""" - import test.test__xxsubinterpreters - SpamError = test.test__xxsubinterpreters.RunFailedTests.SpamReducedError - raise SpamError('eggs') - """) - try: - exec(script, (ns := {'__name__': '__main__'}), ns) - except Exception as exc: - expected = exc - - interpid = interpreters.create() - with 
self.expected_run_failure(expected): - interpreters.run_string(interpid, script) - - def test_traceback_propagated(self): - script = dedent(""" - def do_spam(): - raise Exception('uh-oh') - def do_eggs(): - return do_spam() - class Spam: - def do(self): - return do_eggs() - def get_handler(): - def handler(): - return Spam().do() - return handler - go = (lambda: get_handler()()) - def iter_all(): - yield from (go() for _ in [True]) - yield None - def main(): - for v in iter_all(): - pass - main() - """) - try: - ns = {} - exec(script, ns, ns) - except Exception as exc: - expected = exc - expectedtb = exc.__traceback__.tb_next - - interpid = interpreters.create() - with self.expected_run_failure(expected) as caught: - interpreters.run_string(interpid, script) - exc = caught.exception - - self.assertTracebacksEqual(exc.__cause__.__traceback__, - expectedtb) - - def test_chained_exceptions(self): - script = dedent(""" - try: - raise ValueError('msg 1') - except Exception as exc1: - try: - raise TypeError('msg 2') - except Exception as exc2: - try: - raise IndexError('msg 3') from exc2 - except Exception: - raise AttributeError('msg 4') - """) - try: - exec(script, {}, {}) - except Exception as exc: - expected = exc - - interpid = interpreters.create() - with self.expected_run_failure(expected) as caught: - interpreters.run_string(interpid, script) - exc = caught.exception - - # ...just to be sure. - self.assertIs(type(exc.__cause__), AttributeError) - - ################################## # channel tests diff --git a/Modules/_xxsubinterpretersmodule.c b/Modules/_xxsubinterpretersmodule.c index 18dd8918e7c89..8a6fce9e0b4bd 100644 --- a/Modules/_xxsubinterpretersmodule.c +++ b/Modules/_xxsubinterpretersmodule.c @@ -1,4 +1,5 @@ -/* _interpreters module */ + +/* interpreters module */ /* low-level access to interpreter primitives */ #include "Python.h" @@ -6,927 +7,35 @@ #include "interpreteridobject.h" -// XXX Emit a warning? 
-#define IGNORE_FAILURE(msg) \ - fprintf(stderr, " -----\nRunFailedError: %s\n", msg); \ - PyErr_PrintEx(0); \ - fprintf(stderr, " -----\n"); \ - PyErr_Clear(); - -typedef void (*_deallocfunc)(void *); - -static PyInterpreterState * -_get_current(void) -{ - // _PyInterpreterState_Get() aborts if lookup fails, so don't need - // to check the result for NULL. - return _PyInterpreterState_Get(); -} - - -/* string utils *************************************************************/ - -// PyMem_Free() must be used to dealocate the resulting string. static char * -_strdup_and_size(const char *data, Py_ssize_t *psize, _deallocfunc *dealloc) -{ - if (data == NULL) { - if (psize != NULL) { - *psize = 0; - } - if (dealloc != NULL) { - *dealloc = NULL; - } - return ""; - } - - Py_ssize_t size; - if (psize == NULL) { - size = strlen(data); - } else { - size = *psize; - if (size == 0) { - size = strlen(data); - *psize = size; // The size "return" value. - } - } - char *copied = PyMem_Malloc(size+1); - if (copied == NULL) { - PyErr_NoMemory(); - return NULL; - } - if (dealloc != NULL) { - *dealloc = PyMem_Free; - } - memcpy(copied, data, size+1); - return copied; -} - -static const char * -_pyobj_get_str_and_size(PyObject *obj, Py_ssize_t *psize) -{ - if (PyUnicode_Check(obj)) { - return PyUnicode_AsUTF8AndSize(obj, psize); - } else { - const char *data = NULL; - PyBytes_AsStringAndSize(obj, (char **)&data, psize); - return data; - } -} - -/* "raw" strings */ - -typedef struct _rawstring { - Py_ssize_t size; - const char *data; - _deallocfunc dealloc; -} _rawstring; - -static void -_rawstring_init(_rawstring *raw) -{ - raw->size = 0; - raw->data = NULL; - raw->dealloc = NULL; -} - -static _rawstring * -_rawstring_new(void) -{ - _rawstring *raw = PyMem_NEW(_rawstring, 1); - if (raw == NULL) { - PyErr_NoMemory(); - return NULL; - } - _rawstring_init(raw); - return raw; -} - -static void -_rawstring_clear(_rawstring *raw) -{ - if (raw->data != NULL && raw->dealloc != NULL) { - 
(*raw->dealloc)((void *)raw->data); - } - _rawstring_init(raw); -} - -static void -_rawstring_free(_rawstring *raw) -{ - _rawstring_clear(raw); - PyMem_Free(raw); -} - -static int -_rawstring_is_clear(_rawstring *raw) -{ - return raw->size == 0 && raw->data == NULL && raw->dealloc == NULL; -} - -//static void -//_rawstring_move(_rawstring *raw, _rawstring *src) -//{ -// raw->size = src->size; -// raw->data = src->data; -// raw->dealloc = src->dealloc; -// _rawstring_init(src); -//} - -static void -_rawstring_proxy(_rawstring *raw, const char *str) +_copy_raw_string(PyObject *strobj) { + const char *str = PyUnicode_AsUTF8(strobj); if (str == NULL) { - str = ""; - } - raw->size = strlen(str); - raw->data = str; - raw->dealloc = NULL; -} - -static int -_rawstring_buffer(_rawstring *raw, Py_ssize_t size) -{ - raw->data = PyMem_Malloc(size+1); - if (raw->data == NULL) { - PyErr_NoMemory(); - return -1; - } - raw->size = size; - raw->dealloc = PyMem_Free; - return 0; -} - -static int -_rawstring_strcpy(_rawstring *raw, const char *str, Py_ssize_t size) -{ - _deallocfunc dealloc = NULL; - const char *copied = _strdup_and_size(str, &size, &dealloc); - if (copied == NULL) { - return -1; - } - - raw->size = size; - raw->dealloc = dealloc; - raw->data = copied; - return 0; -} - -static int -_rawstring_from_pyobj(_rawstring *raw, PyObject *obj) -{ - Py_ssize_t size = 0; - const char *data = _pyobj_get_str_and_size(obj, &size); - if (PyErr_Occurred()) { - return -1; - } - if (_rawstring_strcpy(raw, data, size) != 0) { - return -1; - } - return 0; -} - -static int -_rawstring_from_pyobj_attr(_rawstring *raw, PyObject *obj, const char *attr) -{ - int res = -1; - PyObject *valueobj = PyObject_GetAttrString(obj, attr); - if (valueobj == NULL) { - goto done; - } - if (!PyUnicode_Check(valueobj)) { - // XXX PyObject_Str()? Repr()? 
- goto done; - } - const char *valuestr = PyUnicode_AsUTF8(valueobj); - if (valuestr == NULL) { - if (PyErr_Occurred()) { - goto done; - } - } else if (_rawstring_strcpy(raw, valuestr, 0) != 0) { - _rawstring_clear(raw); - goto done; - } - res = 0; - -done: - Py_XDECREF(valueobj); - return res; -} - -static PyObject * -_rawstring_as_pybytes(_rawstring *raw) -{ - return PyBytes_FromStringAndSize(raw->data, raw->size); -} - - -/* object utils *************************************************************/ - -static void -_pyobj_identify_type(PyObject *obj, _rawstring *modname, _rawstring *clsname) -{ - PyObject *objtype = (PyObject *)Py_TYPE(obj); - - // Try __module__ and __name__. - if (_rawstring_from_pyobj_attr(modname, objtype, "__module__") != 0) { - // Fall back to the previous values in "modname". - IGNORE_FAILURE("bad __module__"); - } - if (_rawstring_from_pyobj_attr(clsname, objtype, "__name__") != 0) { - // Fall back to the previous values in "clsname". - IGNORE_FAILURE("bad __name__"); - } - - // XXX Fall back to __qualname__? - // XXX Fall back to tp_name? -} - -static PyObject * -_pyobj_get_class(const char *modname, const char *clsname) -{ - assert(clsname != NULL); - if (modname == NULL) { - modname = "builtins"; - } - - PyObject *module = PyImport_ImportModule(modname); - if (module == NULL) { - return NULL; - } - PyObject *cls = PyObject_GetAttrString(module, clsname); - Py_DECREF(module); - return cls; -} - -static PyObject * -_pyobj_create(const char *modname, const char *clsname, PyObject *arg) -{ - PyObject *cls = _pyobj_get_class(modname, clsname); - if (cls == NULL) { return NULL; } - PyObject *obj = NULL; - if (arg == NULL) { - obj = _PyObject_CallNoArg(cls); - } else { - obj = PyObject_CallFunction(cls, "O", arg); - } - Py_DECREF(cls); - return obj; -} - - -/* object snapshots */ - -typedef struct _objsnapshot { - // If modname is NULL then try "builtins" and "__main__". - _rawstring modname; - // clsname is required. 
- _rawstring clsname; - - // The rest are optional. - - // The serialized exception. - _rawstring *serialized; -} _objsnapshot; - -static void -_objsnapshot_init(_objsnapshot *osn) -{ - _rawstring_init(&osn->modname); - _rawstring_init(&osn->clsname); - osn->serialized = NULL; -} - -//static _objsnapshot * -//_objsnapshot_new(void) -//{ -// _objsnapshot *osn = PyMem_NEW(_objsnapshot, 1); -// if (osn == NULL) { -// PyErr_NoMemory(); -// return NULL; -// } -// _objsnapshot_init(osn); -// return osn; -//} - -static void -_objsnapshot_clear(_objsnapshot *osn) -{ - _rawstring_clear(&osn->modname); - _rawstring_clear(&osn->clsname); - if (osn->serialized != NULL) { - _rawstring_free(osn->serialized); - osn->serialized = NULL; - } -} - -//static void -//_objsnapshot_free(_objsnapshot *osn) -//{ -// _objsnapshot_clear(osn); -// PyMem_Free(osn); -//} - -#ifndef NDEBUG -static int -_objsnapshot_is_clear(_objsnapshot *osn) -{ - return osn->serialized == NULL - && _rawstring_is_clear(&osn->modname) - && _rawstring_is_clear(&osn->clsname); -} -#endif - -static void -_objsnapshot_summarize(_objsnapshot *osn, _rawstring *rawbuf, const char *msg) -{ - if (msg == NULL || *msg == '\0') { - // XXX Keep it NULL? - // XXX Keep it an empty string? - // XXX Use something more informative? - msg = ""; - } - const char *clsname = osn->clsname.data; - const char *modname = osn->modname.data; - if (modname && *modname == '\0') { - modname = NULL; - } - - // Prep the buffer. 
- Py_ssize_t size = strlen(clsname); - if (modname != NULL) { - if (strcmp(modname, "builtins") == 0) { - modname = NULL; - } else if (strcmp(modname, "__main__") == 0) { - modname = NULL; - } else { - size += strlen(modname) + 1; - } - } - if (msg != NULL) { - size += strlen(": ") + strlen(msg); - } - if (modname != NULL || msg != NULL) { - if (_rawstring_buffer(rawbuf, size) != 0) { - IGNORE_FAILURE("could not summarize object snapshot"); - return; - } - } - // ...else we'll proxy clsname as-is, so no need to allocate a buffer. - - // XXX Use __qualname__ somehow? - char *buf = (char *)rawbuf->data; - if (modname != NULL) { - if (msg != NULL) { - snprintf(buf, size+1, "%s.%s: %s", modname, clsname, msg); - } else { - snprintf(buf, size+1, "%s.%s", modname, clsname); - } - } else if (msg != NULL) { - snprintf(buf, size+1, "%s: %s", clsname, msg); - } else { - _rawstring_proxy(rawbuf, clsname); - } -} - -static _rawstring * -_objsnapshot_get_minimal_summary(_objsnapshot *osn, PyObject *obj) -{ - const char *str = NULL; - PyObject *objstr = PyObject_Str(obj); - if (objstr == NULL) { - PyErr_Clear(); - } else { - str = PyUnicode_AsUTF8(objstr); - if (str == NULL) { - PyErr_Clear(); - } - } - - _rawstring *summary = _rawstring_new(); - if (summary == NULL) { - return NULL; - } - _objsnapshot_summarize(osn, summary, str); - return summary; -} - -static void -_objsnapshot_extract(_objsnapshot *osn, PyObject *obj) -{ - assert(_objsnapshot_is_clear(osn)); - - // Get the "qualname". - _rawstring_proxy(&osn->modname, ""); - _rawstring_proxy(&osn->clsname, ""); - _pyobj_identify_type(obj, &osn->modname, &osn->clsname); - - // Serialize the object. - // XXX Use marshal? 
- PyObject *pickle = PyImport_ImportModule("pickle"); - if (pickle == NULL) { - IGNORE_FAILURE("could not serialize object: pickle import failed"); - return; - } - PyObject *objdata = PyObject_CallMethod(pickle, "dumps", "(O)", obj); - Py_DECREF(pickle); - if (objdata == NULL) { - IGNORE_FAILURE("could not serialize object: pickle.dumps failed"); - } else { - _rawstring *serialized = _rawstring_new(); - int res = _rawstring_from_pyobj(serialized, objdata); - Py_DECREF(objdata); - if (res != 0) { - IGNORE_FAILURE("could not serialize object: raw str failed"); - _rawstring_free(serialized); - } else if (serialized->size == 0) { - _rawstring_free(serialized); - } else { - osn->serialized = serialized; - } - } -} - -static PyObject * -_objsnapshot_resolve_serialized(_objsnapshot *osn) -{ - assert(osn->serialized != NULL); - - // XXX Use marshal? - PyObject *pickle = PyImport_ImportModule("pickle"); - if (pickle == NULL) { - return NULL; - } - PyObject *objdata = _rawstring_as_pybytes(osn->serialized); - if (objdata == NULL) { - return NULL; - } else { - PyObject *obj = PyObject_CallMethod(pickle, "loads", "O", objdata); - Py_DECREF(objdata); - return obj; - } -} - -static PyObject * -_objsnapshot_resolve_naive(_objsnapshot *osn, PyObject *arg) -{ - if (_rawstring_is_clear(&osn->clsname)) { - // We can't proceed without at least the class name. 
- PyErr_SetString(PyExc_ValueError, "missing class name"); - return NULL; - } - - if (osn->modname.data != NULL) { - return _pyobj_create(osn->modname.data, osn->clsname.data, arg); - } else { - PyObject *obj = _pyobj_create("builtins", osn->clsname.data, arg); - if (obj == NULL) { - PyErr_Clear(); - obj = _pyobj_create("__main__", osn->clsname.data, arg); - } - return obj; - } -} - -static PyObject * -_objsnapshot_resolve(_objsnapshot *osn) -{ - if (osn->serialized != NULL) { - PyObject *obj = _objsnapshot_resolve_serialized(osn); - if (obj != NULL) { - return obj; - } - IGNORE_FAILURE("could not de-serialize object"); - } - - // Fall back to naive resolution. - return _objsnapshot_resolve_naive(osn, NULL); -} - - -/* exception utils **********************************************************/ - -// _pyexc_create is inspired by _PyErr_SetObject(). - -static PyObject * -_pyexc_create(PyObject *exctype, const char *msg, PyObject *tb) -{ - assert(exctype != NULL && PyExceptionClass_Check(exctype)); - - PyObject *curtype = NULL, *curexc = NULL, *curtb = NULL; - PyErr_Fetch(&curtype, &curexc, &curtb); - - // Create the object. - PyObject *exc = NULL; - if (msg != NULL) { - PyObject *msgobj = PyUnicode_FromString(msg); - if (msgobj == NULL) { - IGNORE_FAILURE("could not deserialize propagated error message"); - } - exc = _PyObject_CallOneArg(exctype, msgobj); - Py_XDECREF(msgobj); - } else { - exc = _PyObject_CallNoArg(exctype); - } - if (exc == NULL) { - return NULL; - } - - // Set the traceback, if any. - if (tb == NULL) { - tb = curtb; - } - if (tb != NULL) { - // This does *not* steal a reference! 
- PyException_SetTraceback(exc, tb); - } - - PyErr_Restore(curtype, curexc, curtb); - - return exc; -} - -/* traceback snapshots */ - -typedef struct _tbsnapshot { - _rawstring tbs_funcname; - _rawstring tbs_filename; - int tbs_lineno; - struct _tbsnapshot *tbs_next; -} _tbsnapshot; - -static void -_tbsnapshot_init(_tbsnapshot *tbs) -{ - _rawstring_init(&tbs->tbs_funcname); - _rawstring_init(&tbs->tbs_filename); - tbs->tbs_lineno = -1; - tbs->tbs_next = NULL; -} - -static _tbsnapshot * -_tbsnapshot_new(void) -{ - _tbsnapshot *tbs = PyMem_NEW(_tbsnapshot, 1); - if (tbs == NULL) { - PyErr_NoMemory(); - return NULL; - } - _tbsnapshot_init(tbs); - return tbs; -} - -static void _tbsnapshot_free(_tbsnapshot *); // forward - -static void -_tbsnapshot_clear(_tbsnapshot *tbs) -{ - _rawstring_clear(&tbs->tbs_funcname); - _rawstring_clear(&tbs->tbs_filename); - tbs->tbs_lineno = -1; - if (tbs->tbs_next != NULL) { - _tbsnapshot_free(tbs->tbs_next); - tbs->tbs_next = NULL; - } -} - -static void -_tbsnapshot_free(_tbsnapshot *tbs) -{ - _tbsnapshot_clear(tbs); - PyMem_Free(tbs); -} - -#ifndef NDEBUG -static int -_tbsnapshot_is_clear(_tbsnapshot *tbs) -{ - return tbs->tbs_lineno == -1 && tbs->tbs_next == NULL - && _rawstring_is_clear(&tbs->tbs_funcname) - && _rawstring_is_clear(&tbs->tbs_filename); -} -#endif - -static int -_tbsnapshot_from_pytb(_tbsnapshot *tbs, PyTracebackObject *pytb) -{ - assert(_tbsnapshot_is_clear(tbs)); - assert(pytb != NULL); - - PyCodeObject *pycode = pytb->tb_frame->f_code; - const char *funcname = PyUnicode_AsUTF8(pycode->co_name); - if (_rawstring_strcpy(&tbs->tbs_funcname, funcname, 0) != 0) { - goto error; - } - const char *filename = PyUnicode_AsUTF8(pycode->co_filename); - if (_rawstring_strcpy(&tbs->tbs_filename, filename, 0) != 0) { - goto error; - } - tbs->tbs_lineno = pytb->tb_lineno; - - return 0; - -error: - _tbsnapshot_clear(tbs); - return -1; -} - -static int -_tbsnapshot_extract(_tbsnapshot *tbs, PyTracebackObject *pytb) -{ - 
assert(_tbsnapshot_is_clear(tbs)); - assert(pytb != NULL); - - _tbsnapshot *next = NULL; - while (pytb->tb_next != NULL) { - _tbsnapshot *_next = _tbsnapshot_new(); - if (_next == NULL) { - goto error; - } - if (_tbsnapshot_from_pytb(_next, pytb) != 0) { - goto error; - } - if (next != NULL) { - _next->tbs_next = next; - } - next = _next; - pytb = pytb->tb_next; - } - if (_tbsnapshot_from_pytb(tbs, pytb) != 0) { - goto error; - } - tbs->tbs_next = next; - - return 0; - -error: - _tbsnapshot_clear(tbs); - return -1; -} - -static PyObject * -_tbsnapshot_resolve(_tbsnapshot *tbs) -{ - assert(!PyErr_Occurred()); - // At this point there should be no traceback set yet. - - while (tbs != NULL) { - const char *funcname = tbs->tbs_funcname.data; - const char *filename = tbs->tbs_filename.data; - _PyTraceback_Add(funcname ? funcname : "", - filename ? filename : "", - tbs->tbs_lineno); - tbs = tbs->tbs_next; - } - - PyObject *exctype = NULL, *excval = NULL, *tb = NULL; - PyErr_Fetch(&exctype, &excval, &tb); - // Leave it cleared. 
- return tb; -} - -/* exception snapshots */ - -typedef struct _excsnapshot { - _objsnapshot es_object; - _rawstring *es_msg; - struct _excsnapshot *es_cause; - struct _excsnapshot *es_context; - char es_suppress_context; - struct _tbsnapshot *es_traceback; -} _excsnapshot; - -static void -_excsnapshot_init(_excsnapshot *es) -{ - _objsnapshot_init(&es->es_object); - es->es_msg = NULL; - es->es_cause = NULL; - es->es_context = NULL; - es->es_suppress_context = 0; - es->es_traceback = NULL; -} - -static _excsnapshot * -_excsnapshot_new(void) { - _excsnapshot *es = PyMem_NEW(_excsnapshot, 1); - if (es == NULL) { + char *copied = PyMem_Malloc(strlen(str)+1); + if (copied == NULL) { PyErr_NoMemory(); return NULL; } - _excsnapshot_init(es); - return es; -} - -static void _excsnapshot_free(_excsnapshot *); // forward - -static void -_excsnapshot_clear(_excsnapshot *es) -{ - _objsnapshot_clear(&es->es_object); - if (es->es_msg != NULL) { - _rawstring_free(es->es_msg); - es->es_msg = NULL; - } - if (es->es_cause != NULL) { - _excsnapshot_free(es->es_cause); - es->es_cause = NULL; - } - if (es->es_context != NULL) { - _excsnapshot_free(es->es_context); - es->es_context = NULL; - } - es->es_suppress_context = 0; - if (es->es_traceback != NULL) { - _tbsnapshot_free(es->es_traceback); - es->es_traceback = NULL; - } -} - -static void -_excsnapshot_free(_excsnapshot *es) -{ - _excsnapshot_clear(es); - PyMem_Free(es); -} - -#ifndef NDEBUG -static int -_excsnapshot_is_clear(_excsnapshot *es) -{ - return es->es_suppress_context == 0 - && es->es_cause == NULL - && es->es_context == NULL - && es->es_traceback == NULL - && es->es_msg == NULL - && _objsnapshot_is_clear(&es->es_object); -} -#endif - -static PyObject * -_excsnapshot_get_exc_naive(_excsnapshot *es) -{ - _rawstring buf; - const char *msg = NULL; - if (es->es_msg != NULL) { - msg = es->es_msg->data; - } else { - _objsnapshot_summarize(&es->es_object, &buf, NULL); - if (buf.size > 0) { - msg = buf.data; - } - } - - PyObject 
*exc = NULL; - // XXX Use _objsnapshot_resolve_naive()? - const char *modname = es->es_object.modname.size > 0 - ? es->es_object.modname.data - : NULL; - PyObject *exctype = _pyobj_get_class(modname, es->es_object.clsname.data); - if (exctype != NULL) { - exc = _pyexc_create(exctype, msg, NULL); - Py_DECREF(exctype); - if (exc != NULL) { - return exc; - } - PyErr_Clear(); - } else { - PyErr_Clear(); - } - exctype = PyExc_Exception; - return _pyexc_create(exctype, msg, NULL); -} - -static PyObject * -_excsnapshot_get_exc(_excsnapshot *es) -{ - assert(!_objsnapshot_is_clear(&es->es_object)); - - PyObject *exc = _objsnapshot_resolve(&es->es_object); - if (exc == NULL) { - // Fall back to resolving the object. - PyObject *curtype = NULL, *curexc = NULL, *curtb = NULL; - PyErr_Fetch(&curtype, &curexc, &curtb); - - exc = _excsnapshot_get_exc_naive(es); - if (exc == NULL) { - PyErr_Restore(curtype, curexc, curtb); - return NULL; - } - } - // People can do some weird stuff... - if (!PyExceptionInstance_Check(exc)) { - // We got a bogus "exception". 
- Py_DECREF(exc); - PyErr_SetString(PyExc_TypeError, "expected exception"); - return NULL; - } - return exc; -} - -static void _excsnapshot_extract(_excsnapshot *, PyObject *); -static void -_excsnapshot_extract(_excsnapshot *es, PyObject *excobj) -{ - assert(_excsnapshot_is_clear(es)); - assert(PyExceptionInstance_Check(excobj)); - - _objsnapshot_extract(&es->es_object, excobj); - - es->es_msg = _objsnapshot_get_minimal_summary(&es->es_object, excobj); - if (es->es_msg == NULL) { - PyErr_Clear(); - } - - PyBaseExceptionObject *exc = (PyBaseExceptionObject *)excobj; - - if (exc->cause != NULL && exc->cause != Py_None) { - es->es_cause = _excsnapshot_new(); - _excsnapshot_extract(es->es_cause, exc->cause); - } - - if (exc->context != NULL && exc->context != Py_None) { - es->es_context = _excsnapshot_new(); - _excsnapshot_extract(es->es_context, exc->context); - } - - es->es_suppress_context = exc->suppress_context; - - PyObject *tb = PyException_GetTraceback(excobj); - if (PyErr_Occurred()) { - IGNORE_FAILURE("could not get traceback"); - } else if (tb == Py_None) { - Py_DECREF(tb); - tb = NULL; - } - if (tb != NULL) { - es->es_traceback = _tbsnapshot_new(); - if (_tbsnapshot_extract(es->es_traceback, - (PyTracebackObject *)tb) != 0) { - IGNORE_FAILURE("could not extract __traceback__"); - } - } + strcpy(copied, str); + return copied; } -static PyObject * -_excsnapshot_resolve(_excsnapshot *es) +static PyInterpreterState * +_get_current(void) { - PyObject *exc = _excsnapshot_get_exc(es); - if (exc == NULL) { - return NULL; - } - - if (es->es_traceback != NULL) { - PyObject *tb = _tbsnapshot_resolve(es->es_traceback); - if (tb == NULL) { - // The snapshot is still somewhat useful without this. - IGNORE_FAILURE("could not deserialize traceback"); - } else { - // This does not steal references. - PyException_SetTraceback(exc, tb); - Py_DECREF(tb); - } - } - // NULL means "not set". 
- - if (es->es_context != NULL) { - PyObject *context = _excsnapshot_resolve(es->es_context); - if (context == NULL) { - // The snapshot is still useful without this. - IGNORE_FAILURE("could not deserialize __context__"); - } else { - // This steals references but we have one to give. - PyException_SetContext(exc, context); - } - } - // NULL means "not set". - - if (es->es_cause != NULL) { - PyObject *cause = _excsnapshot_resolve(es->es_cause); - if (cause == NULL) { - // The snapshot is still useful without this. - IGNORE_FAILURE("could not deserialize __cause__"); - } else { - // This steals references, but we have one to give. - PyException_SetCause(exc, cause); - } - } - // NULL means "not set". - - ((PyBaseExceptionObject *)exc)->suppress_context = es->es_suppress_context; - - return exc; + // PyInterpreterState_Get() aborts if lookup fails, so don't need + // to check the result for NULL. + return PyInterpreterState_Get(); } /* data-sharing-specific code ***********************************************/ -/* shared "object" */ - struct _sharednsitem { - _rawstring name; + char *name; _PyCrossInterpreterData data; }; @@ -935,7 +44,8 @@ static void _sharednsitem_clear(struct _sharednsitem *); // forward static int _sharednsitem_init(struct _sharednsitem *item, PyObject *key, PyObject *value) { - if (_rawstring_from_pyobj(&item->name, key) != 0) { + item->name = _copy_raw_string(key); + if (item->name == NULL) { return -1; } if (_PyObject_GetCrossInterpreterData(value, &item->data) != 0) { @@ -948,14 +58,17 @@ _sharednsitem_init(struct _sharednsitem *item, PyObject *key, PyObject *value) static void _sharednsitem_clear(struct _sharednsitem *item) { - _rawstring_clear(&item->name); + if (item->name != NULL) { + PyMem_Free(item->name); + item->name = NULL; + } _PyCrossInterpreterData_Release(&item->data); } static int _sharednsitem_apply(struct _sharednsitem *item, PyObject *ns) { - PyObject *name = PyUnicode_FromString(item->name.data); + PyObject *name = 
PyUnicode_FromString(item->name); if (name == NULL) { return -1; } @@ -1046,121 +159,121 @@ _sharedns_apply(_sharedns *shared, PyObject *ns) return 0; } -/* shared exception */ - // Ultimately we'd like to preserve enough information about the // exception and traceback that we could re-constitute (or at least // simulate, a la traceback.TracebackException), and even chain, a copy // of the exception in the calling interpreter. typedef struct _sharedexception { - _excsnapshot snapshot; - _rawstring msg; + char *name; + char *msg; } _sharedexception; -static void -_sharedexception_init(_sharedexception *she) -{ - _excsnapshot_init(&she->snapshot); - _rawstring_init(&she->msg); -} - static _sharedexception * _sharedexception_new(void) { - _sharedexception *she = PyMem_NEW(_sharedexception, 1); - if (she == NULL) { + _sharedexception *err = PyMem_NEW(_sharedexception, 1); + if (err == NULL) { PyErr_NoMemory(); return NULL; } - _sharedexception_init(she); - return she; + err->name = NULL; + err->msg = NULL; + return err; } static void -_sharedexception_clear(_sharedexception *she) +_sharedexception_clear(_sharedexception *exc) { - _excsnapshot_clear(&she->snapshot); - _rawstring_clear(&she->msg); + if (exc->name != NULL) { + PyMem_Free(exc->name); + } + if (exc->msg != NULL) { + PyMem_Free(exc->msg); + } } static void -_sharedexception_free(_sharedexception *she) +_sharedexception_free(_sharedexception *exc) { - _sharedexception_clear(she); - PyMem_Free(she); + _sharedexception_clear(exc); + PyMem_Free(exc); } -#ifndef NDEBUG -static int -_sharedexception_is_clear(_sharedexception *she) +static _sharedexception * +_sharedexception_bind(PyObject *exctype, PyObject *exc, PyObject *tb) { - return 1 - && _excsnapshot_is_clear(&she->snapshot) - && _rawstring_is_clear(&she->msg); -} -#endif + assert(exctype != NULL); + char *failure = NULL; -static PyObject * -_sharedexception_get_cause(_sharedexception *sharedexc) -{ - // FYI, "cause" is already normalized. 
- PyObject *cause = _excsnapshot_resolve(&sharedexc->snapshot); - if (cause == NULL) { - if (PyErr_Occurred()) { - IGNORE_FAILURE("could not deserialize exc snapshot"); - } - return NULL; + _sharedexception *err = _sharedexception_new(); + if (err == NULL) { + goto finally; } - // XXX Ensure "cause" has a traceback. - return cause; -} -static void -_sharedexception_extract(_sharedexception *she, PyObject *exc) -{ - assert(_sharedexception_is_clear(she)); - assert(exc != NULL); + PyObject *name = PyUnicode_FromFormat("%S", exctype); + if (name == NULL) { + failure = "unable to format exception type name"; + goto finally; + } + err->name = _copy_raw_string(name); + Py_DECREF(name); + if (err->name == NULL) { + if (PyErr_ExceptionMatches(PyExc_MemoryError)) { + failure = "out of memory copying exception type name"; + } else { + failure = "unable to encode and copy exception type name"; + } + goto finally; + } - _excsnapshot_extract(&she->snapshot, exc); + if (exc != NULL) { + PyObject *msg = PyUnicode_FromFormat("%S", exc); + if (msg == NULL) { + failure = "unable to format exception message"; + goto finally; + } + err->msg = _copy_raw_string(msg); + Py_DECREF(msg); + if (err->msg == NULL) { + if (PyErr_ExceptionMatches(PyExc_MemoryError)) { + failure = "out of memory copying exception message"; + } else { + failure = "unable to encode and copy exception message"; + } + goto finally; + } + } - // Compose the message. 
- const char *msg = NULL; - PyObject *msgobj = PyUnicode_FromFormat("%S", exc); - if (msgobj == NULL) { - IGNORE_FAILURE("unable to format exception message"); - } else { - msg = PyUnicode_AsUTF8(msgobj); - if (PyErr_Occurred()) { - PyErr_Clear(); +finally: + if (failure != NULL) { + PyErr_Clear(); + if (err->name != NULL) { + PyMem_Free(err->name); + err->name = NULL; } + err->msg = failure; } - _objsnapshot_summarize(&she->snapshot.es_object, &she->msg, msg); - Py_XDECREF(msgobj); + return err; } -static PyObject * -_sharedexception_resolve(_sharedexception *sharedexc, PyObject *wrapperclass) +static void +_sharedexception_apply(_sharedexception *exc, PyObject *wrapperclass) { - assert(!PyErr_Occurred()); - - // Get the exception object (already normalized). - PyObject *exc = _pyexc_create(wrapperclass, sharedexc->msg.data, NULL); - assert(exc != NULL); - - // Set __cause__, is possible. - PyObject *cause = _sharedexception_get_cause(sharedexc); - if (cause != NULL) { - // Set __context__. - Py_INCREF(cause); // PyException_SetContext() steals a reference. - PyException_SetContext(exc, cause); - - // Set __cause__. - Py_INCREF(cause); // PyException_SetCause() steals a reference. - PyException_SetCause(exc, cause); + if (exc->name != NULL) { + if (exc->msg != NULL) { + PyErr_Format(wrapperclass, "%s: %s", exc->name, exc->msg); + } + else { + PyErr_SetString(wrapperclass, exc->name); + } + } + else if (exc->msg != NULL) { + PyErr_SetString(wrapperclass, exc->msg); + } + else { + PyErr_SetNone(wrapperclass); } - - return exc; } @@ -2756,9 +1869,11 @@ _ensure_not_running(PyInterpreterState *interp) static int _run_script(PyInterpreterState *interp, const char *codestr, - _sharedns *shared, _sharedexception **pexc) + _sharedns *shared, _sharedexception **exc) { - assert(!PyErr_Occurred()); // ...in the called interpreter. 
+ PyObject *exctype = NULL; + PyObject *excval = NULL; + PyObject *tb = NULL; PyObject *main_mod = _PyInterpreterState_GetMainModule(interp); if (main_mod == NULL) { @@ -2789,38 +1904,25 @@ _run_script(PyInterpreterState *interp, const char *codestr, Py_DECREF(result); // We throw away the result. } - *pexc = NULL; + *exc = NULL; return 0; - PyObject *exctype = NULL, *exc = NULL, *tb = NULL; error: - PyErr_Fetch(&exctype, &exc, &tb); - - // First normalize the exception. - PyErr_NormalizeException(&exctype, &exc, &tb); - assert(PyExceptionInstance_Check(exc)); - if (tb != NULL) { - PyException_SetTraceback(exc, tb); - } - - // Behave as though the exception was caught in this thread. - PyErr_SetExcInfo(exctype, exc, tb); // Like entering "except" block. + PyErr_Fetch(&exctype, &excval, &tb); - // Serialize the exception. - _sharedexception *sharedexc = _sharedexception_new(); + _sharedexception *sharedexc = _sharedexception_bind(exctype, excval, tb); + Py_XDECREF(exctype); + Py_XDECREF(excval); + Py_XDECREF(tb); if (sharedexc == NULL) { - IGNORE_FAILURE("script raised an uncaught exception"); - } else { - _sharedexception_extract(sharedexc, exc); + fprintf(stderr, "RunFailedError: script raised an uncaught exception"); + PyErr_Clear(); + sharedexc = NULL; + } + else { assert(!PyErr_Occurred()); } - - // Clear the exception. - PyErr_SetExcInfo(NULL, NULL, NULL); // Like leaving "except" block. - PyErr_Clear(); // Do not re-raise. - - // "Return" the serialized exception. - *pexc = sharedexc; + *exc = sharedexc; return -1; } @@ -2828,8 +1930,6 @@ static int _run_script_in_interpreter(PyInterpreterState *interp, const char *codestr, PyObject *shareables) { - assert(!PyErr_Occurred()); // ...in the calling interpreter. - if (_ensure_not_running(interp) < 0) { return -1; } @@ -2863,8 +1963,8 @@ _run_script_in_interpreter(PyInterpreterState *interp, const char *codestr, } // Run the script. 
- _sharedexception *sharedexc = NULL; - int result = _run_script(interp, codestr, shared, &sharedexc); + _sharedexception *exc = NULL; + int result = _run_script(interp, codestr, shared, &exc); // Switch back. if (save_tstate != NULL) { @@ -2873,14 +1973,9 @@ _run_script_in_interpreter(PyInterpreterState *interp, const char *codestr, #endif // Propagate any exception out to the caller. - if (sharedexc != NULL) { - assert(!PyErr_Occurred()); - PyObject *exc = _sharedexception_resolve(sharedexc, RunFailedError); - // XXX This is not safe once interpreters no longer share allocators. - _sharedexception_free(sharedexc); - PyObject *exctype = (PyObject *)Py_TYPE(exc); - Py_INCREF(exctype); // PyErr_Restore() steals a reference. - PyErr_Restore(exctype, exc, PyException_GetTraceback(exc)); + if (exc != NULL) { + _sharedexception_apply(exc, RunFailedError); + _sharedexception_free(exc); } else if (result != 0) { // We were unable to allocate a shared exception. From webhook-mailer at python.org Thu May 14 15:55:55 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 14 May 2020 19:55:55 -0000 Subject: [Python-checkins] bpo-40602: Write unit tests for _Py_hashtable_t (GH-20091) Message-ID: https://github.com/python/cpython/commit/a482dc500b6ec4889f6a126ba08cbad6c11e37bc commit: a482dc500b6ec4889f6a126ba08cbad6c11e37bc branch: master author: Victor Stinner committer: GitHub date: 2020-05-14T21:55:47+02:00 summary: bpo-40602: Write unit tests for _Py_hashtable_t (GH-20091) Cleanup also hashtable.c. 
Rename _Py_hashtable_t members: * Rename entries to nentries * Rename num_buckets to nbuckets files: M Include/internal/pycore_hashtable.h M Modules/_testinternalcapi.c M Python/hashtable.c M Python/marshal.c diff --git a/Include/internal/pycore_hashtable.h b/Include/internal/pycore_hashtable.h index 2990f9e0c1cc6..18757abc28c19 100644 --- a/Include/internal/pycore_hashtable.h +++ b/Include/internal/pycore_hashtable.h @@ -48,18 +48,18 @@ typedef _Py_hashtable_entry_t* (*_Py_hashtable_get_entry_func)(_Py_hashtable_t * const void *key); typedef struct { - /* allocate a memory block */ + // Allocate a memory block void* (*malloc) (size_t size); - /* release a memory block */ + // Release a memory block void (*free) (void *ptr); } _Py_hashtable_allocator_t; /* _Py_hashtable: table */ struct _Py_hashtable_t { - size_t num_buckets; - size_t entries; /* Total number of entries in the table. */ + size_t nentries; // Total number of entries in the table + size_t nbuckets; _Py_slist_t *buckets; _Py_hashtable_get_entry_func get_entry_func; @@ -70,10 +70,10 @@ struct _Py_hashtable_t { _Py_hashtable_allocator_t alloc; }; -/* hash a pointer (void*) */ +/* Hash a pointer (void*) */ PyAPI_FUNC(Py_uhash_t) _Py_hashtable_hash_ptr(const void *key); -/* comparison using memcmp() */ +/* Comparison using memcmp() */ PyAPI_FUNC(int) _Py_hashtable_compare_direct( const void *key1, const void *key2); @@ -129,13 +129,14 @@ _Py_hashtable_get_entry(_Py_hashtable_t *ht, const void *key) Use _Py_hashtable_get_entry() to distinguish entry value equal to NULL and entry not found. */ -extern void *_Py_hashtable_get(_Py_hashtable_t *ht, const void *key); +PyAPI_FUNC(void*) _Py_hashtable_get(_Py_hashtable_t *ht, const void *key); -// Remove a key and its associated value without calling key and value destroy -// functions. -// Return the removed value if the key was found. -// Return NULL if the key was not found. 
+/* Remove a key and its associated value without calling key and value destroy + functions. + + Return the removed value if the key was found. + Return NULL if the key was not found. */ PyAPI_FUNC(void*) _Py_hashtable_steal( _Py_hashtable_t *ht, const void *key); diff --git a/Modules/_testinternalcapi.c b/Modules/_testinternalcapi.c index 1b7563cb20fc5..3ae387d945d76 100644 --- a/Modules/_testinternalcapi.c +++ b/Modules/_testinternalcapi.c @@ -14,6 +14,7 @@ #include "Python.h" #include "pycore_byteswap.h" // _Py_bswap32() #include "pycore_initconfig.h" // _Py_GetConfigsAsDict() +#include "pycore_hashtable.h" // _Py_hashtable_new() #include "pycore_gc.h" // PyGC_Head @@ -62,10 +63,97 @@ test_bswap(PyObject *self, PyObject *Py_UNUSED(args)) } +#define TO_PTR(ch) ((void*)(uintptr_t)ch) +#define FROM_PTR(ptr) ((uintptr_t)ptr) +#define VALUE(key) (1 + ((int)(key) - 'a')) + +static Py_uhash_t +hash_char(const void *key) +{ + char ch = (char)FROM_PTR(key); + return ch; +} + + +static int +hashtable_cb(_Py_hashtable_t *table, + const void *key_ptr, const void *value_ptr, + void *user_data) +{ + int *count = (int *)user_data; + char key = (char)FROM_PTR(key_ptr); + int value = (int)FROM_PTR(value_ptr); + assert(value == VALUE(key)); + *count += 1; + return 0; +} + + +static PyObject* +test_hashtable(PyObject *self, PyObject *Py_UNUSED(args)) +{ + _Py_hashtable_t *table = _Py_hashtable_new(hash_char, + _Py_hashtable_compare_direct); + if (table == NULL) { + return PyErr_NoMemory(); + } + + // Test _Py_hashtable_set() + char key; + for (key='a'; key <= 'z'; key++) { + int value = VALUE(key); + if (_Py_hashtable_set(table, TO_PTR(key), TO_PTR(value)) < 0) { + _Py_hashtable_destroy(table); + return PyErr_NoMemory(); + } + } + assert(table->nentries == 26); + assert(table->nbuckets > table->nentries); + + // Test _Py_hashtable_get_entry() + for (key='a'; key <= 'z'; key++) { + _Py_hashtable_entry_t *entry = _Py_hashtable_get_entry(table, TO_PTR(key)); + assert(entry != NULL); 
+ assert(entry->key = TO_PTR(key)); + assert(entry->value = TO_PTR(VALUE(key))); + } + + // Test _Py_hashtable_get() + for (key='a'; key <= 'z'; key++) { + void *value_ptr = _Py_hashtable_get(table, TO_PTR(key)); + int value = (int)FROM_PTR(value_ptr); + assert(value == VALUE(key)); + } + + // Test _Py_hashtable_steal() + key = 'p'; + void *value_ptr = _Py_hashtable_steal(table, TO_PTR(key)); + int value = (int)FROM_PTR(value_ptr); + assert(value == VALUE(key)); + + assert(table->nentries == 25); + + // Test _Py_hashtable_foreach() + int count = 0; + int res = _Py_hashtable_foreach(table, hashtable_cb, &count); + assert(res == 0); + assert(count == 25); + + // Test _Py_hashtable_clear() + _Py_hashtable_clear(table); + assert(table->nentries == 0); + assert(_Py_hashtable_get(table, TO_PTR('x')) == NULL); + + _Py_hashtable_destroy(table); + Py_RETURN_NONE; +} + + static PyMethodDef TestMethods[] = { {"get_configs", get_configs, METH_NOARGS}, {"get_recursion_depth", get_recursion_depth, METH_NOARGS}, {"test_bswap", test_bswap, METH_NOARGS}, + {"test_hashtable", test_hashtable, METH_NOARGS}, {NULL, NULL} /* sentinel */ }; diff --git a/Python/hashtable.c b/Python/hashtable.c index d1467ad94ed55..45c52859ac2d6 100644 --- a/Python/hashtable.c +++ b/Python/hashtable.c @@ -119,66 +119,20 @@ round_size(size_t s) size_t _Py_hashtable_size(const _Py_hashtable_t *ht) { - size_t size; - - size = sizeof(_Py_hashtable_t); - + size_t size = sizeof(_Py_hashtable_t); /* buckets */ - size += ht->num_buckets * sizeof(_Py_hashtable_entry_t *); - + size += ht->nbuckets * sizeof(_Py_hashtable_entry_t *); /* entries */ - size += ht->entries * sizeof(_Py_hashtable_entry_t); - + size += ht->nentries * sizeof(_Py_hashtable_entry_t); return size; } -#ifdef Py_DEBUG -void -_Py_hashtable_print_stats(_Py_hashtable_t *ht) -{ - size_t size; - size_t chain_len, max_chain_len, total_chain_len, nchains; - _Py_hashtable_entry_t *entry; - size_t hv; - double load; - - size = _Py_hashtable_size(ht); - - 
load = (double)ht->entries / ht->num_buckets; - - max_chain_len = 0; - total_chain_len = 0; - nchains = 0; - for (hv = 0; hv < ht->num_buckets; hv++) { - entry = TABLE_HEAD(ht, hv); - if (entry != NULL) { - chain_len = 0; - for (; entry; entry = ENTRY_NEXT(entry)) { - chain_len++; - } - if (chain_len > max_chain_len) - max_chain_len = chain_len; - total_chain_len += chain_len; - nchains++; - } - } - printf("hash table %p: entries=%" - PY_FORMAT_SIZE_T "u/%" PY_FORMAT_SIZE_T "u (%.0f%%), ", - (void *)ht, ht->entries, ht->num_buckets, load * 100.0); - if (nchains) - printf("avg_chain_len=%.1f, ", (double)total_chain_len / nchains); - printf("max_chain_len=%" PY_FORMAT_SIZE_T "u, %" PY_FORMAT_SIZE_T "u KiB\n", - max_chain_len, size / 1024); -} -#endif - - _Py_hashtable_entry_t * _Py_hashtable_get_entry_generic(_Py_hashtable_t *ht, const void *key) { Py_uhash_t key_hash = ht->hash_func(key); - size_t index = key_hash & (ht->num_buckets - 1); + size_t index = key_hash & (ht->nbuckets - 1); _Py_hashtable_entry_t *entry = entry = TABLE_HEAD(ht, index); while (1) { if (entry == NULL) { @@ -200,7 +154,7 @@ static _Py_hashtable_entry_t * _Py_hashtable_get_entry_ptr(_Py_hashtable_t *ht, const void *key) { Py_uhash_t key_hash = _Py_hashtable_hash_ptr(key); - size_t index = key_hash & (ht->num_buckets - 1); + size_t index = key_hash & (ht->nbuckets - 1); _Py_hashtable_entry_t *entry = entry = TABLE_HEAD(ht, index); while (1) { if (entry == NULL) { @@ -220,7 +174,7 @@ void* _Py_hashtable_steal(_Py_hashtable_t *ht, const void *key) { Py_uhash_t key_hash = ht->hash_func(key); - size_t index = key_hash & (ht->num_buckets - 1); + size_t index = key_hash & (ht->nbuckets - 1); _Py_hashtable_entry_t *entry = TABLE_HEAD(ht, index); _Py_hashtable_entry_t *previous = NULL; @@ -238,12 +192,12 @@ _Py_hashtable_steal(_Py_hashtable_t *ht, const void *key) _Py_slist_remove(&ht->buckets[index], (_Py_slist_item_t *)previous, (_Py_slist_item_t *)entry); - ht->entries--; + ht->nentries--; void 
*value = entry->value; ht->alloc.free(entry); - if ((float)ht->entries / (float)ht->num_buckets < HASHTABLE_LOW) { + if ((float)ht->nentries / (float)ht->nbuckets < HASHTABLE_LOW) { hashtable_rehash(ht); } return value; @@ -263,8 +217,6 @@ _Py_hashtable_set(_Py_hashtable_t *ht, const void *key, void *value) assert(entry == NULL); #endif - Py_uhash_t key_hash = ht->hash_func(key); - size_t index = key_hash & (ht->num_buckets - 1); entry = ht->alloc.malloc(sizeof(_Py_hashtable_entry_t)); if (entry == NULL) { @@ -272,15 +224,17 @@ _Py_hashtable_set(_Py_hashtable_t *ht, const void *key, void *value) return -1; } - entry->key_hash = key_hash; + entry->key_hash = ht->hash_func(key); entry->key = (void *)key; entry->value = value; + size_t index = entry->key_hash & (ht->nbuckets - 1); _Py_slist_prepend(&ht->buckets[index], (_Py_slist_item_t*)entry); - ht->entries++; + ht->nentries++; - if ((float)ht->entries / (float)ht->num_buckets > HASHTABLE_HIGH) + if ((float)ht->nentries / (float)ht->nbuckets > HASHTABLE_HIGH) { hashtable_rehash(ht); + } return 0; } @@ -303,14 +257,14 @@ _Py_hashtable_foreach(_Py_hashtable_t *ht, _Py_hashtable_foreach_func func, void *user_data) { - _Py_hashtable_entry_t *entry; - size_t hv; - - for (hv = 0; hv < ht->num_buckets; hv++) { - for (entry = TABLE_HEAD(ht, hv); entry; entry = ENTRY_NEXT(entry)) { + for (size_t hv = 0; hv < ht->nbuckets; hv++) { + _Py_hashtable_entry_t *entry = TABLE_HEAD(ht, hv); + while (entry != NULL) { int res = func(ht, entry->key, entry->value, user_data); - if (res) + if (res) { return res; + } + entry = ENTRY_NEXT(entry); } } return 0; @@ -320,44 +274,35 @@ _Py_hashtable_foreach(_Py_hashtable_t *ht, static void hashtable_rehash(_Py_hashtable_t *ht) { - size_t buckets_size, new_size, bucket; - _Py_slist_t *old_buckets = NULL; - size_t old_num_buckets; - - new_size = round_size((size_t)(ht->entries * HASHTABLE_REHASH_FACTOR)); - if (new_size == ht->num_buckets) + size_t new_size = round_size((size_t)(ht->nentries * 
HASHTABLE_REHASH_FACTOR)); + if (new_size == ht->nbuckets) { return; + } - old_num_buckets = ht->num_buckets; - - buckets_size = new_size * sizeof(ht->buckets[0]); - old_buckets = ht->buckets; - ht->buckets = ht->alloc.malloc(buckets_size); - if (ht->buckets == NULL) { - /* cancel rehash on memory allocation failure */ - ht->buckets = old_buckets ; + size_t buckets_size = new_size * sizeof(ht->buckets[0]); + _Py_slist_t *new_buckets = ht->alloc.malloc(buckets_size); + if (new_buckets == NULL) { /* memory allocation failed */ return; } - memset(ht->buckets, 0, buckets_size); - - ht->num_buckets = new_size; - - for (bucket = 0; bucket < old_num_buckets; bucket++) { - _Py_hashtable_entry_t *entry, *next; - for (entry = BUCKETS_HEAD(old_buckets[bucket]); entry != NULL; entry = next) { - size_t entry_index; - + memset(new_buckets, 0, buckets_size); + for (size_t bucket = 0; bucket < ht->nbuckets; bucket++) { + _Py_hashtable_entry_t *entry = BUCKETS_HEAD(ht->buckets[bucket]); + while (entry != NULL) { assert(ht->hash_func(entry->key) == entry->key_hash); - next = ENTRY_NEXT(entry); - entry_index = entry->key_hash & (new_size - 1); + _Py_hashtable_entry_t *next = ENTRY_NEXT(entry); + size_t entry_index = entry->key_hash & (new_size - 1); + + _Py_slist_prepend(&new_buckets[entry_index], (_Py_slist_item_t*)entry); - _Py_slist_prepend(&ht->buckets[entry_index], (_Py_slist_item_t*)entry); + entry = next; } } - ht->alloc.free(old_buckets); + ht->alloc.free(ht->buckets); + ht->nbuckets = new_size; + ht->buckets = new_buckets; } @@ -368,10 +313,7 @@ _Py_hashtable_new_full(_Py_hashtable_hash_func hash_func, _Py_hashtable_destroy_func value_destroy_func, _Py_hashtable_allocator_t *allocator) { - _Py_hashtable_t *ht; - size_t buckets_size; _Py_hashtable_allocator_t alloc; - if (allocator == NULL) { alloc.malloc = PyMem_Malloc; alloc.free = PyMem_Free; @@ -380,14 +322,15 @@ _Py_hashtable_new_full(_Py_hashtable_hash_func hash_func, alloc = *allocator; } - ht = (_Py_hashtable_t 
*)alloc.malloc(sizeof(_Py_hashtable_t)); - if (ht == NULL) + _Py_hashtable_t *ht = (_Py_hashtable_t *)alloc.malloc(sizeof(_Py_hashtable_t)); + if (ht == NULL) { return ht; + } - ht->num_buckets = HASHTABLE_MIN_SIZE; - ht->entries = 0; + ht->nbuckets = HASHTABLE_MIN_SIZE; + ht->nentries = 0; - buckets_size = ht->num_buckets * sizeof(ht->buckets[0]); + size_t buckets_size = ht->nbuckets * sizeof(ht->buckets[0]); ht->buckets = alloc.malloc(buckets_size); if (ht->buckets == NULL) { alloc.free(ht); @@ -435,17 +378,16 @@ _Py_hashtable_destroy_entry(_Py_hashtable_t *ht, _Py_hashtable_entry_t *entry) void _Py_hashtable_clear(_Py_hashtable_t *ht) { - _Py_hashtable_entry_t *entry, *next; - size_t i; - - for (i=0; i < ht->num_buckets; i++) { - for (entry = TABLE_HEAD(ht, i); entry != NULL; entry = next) { - next = ENTRY_NEXT(entry); + for (size_t i=0; i < ht->nbuckets; i++) { + _Py_hashtable_entry_t *entry = TABLE_HEAD(ht, i); + while (entry != NULL) { + _Py_hashtable_entry_t *next = ENTRY_NEXT(entry); _Py_hashtable_destroy_entry(ht, entry); + entry = next; } _Py_slist_init(&ht->buckets[i]); } - ht->entries = 0; + ht->nentries = 0; hashtable_rehash(ht); } @@ -453,7 +395,7 @@ _Py_hashtable_clear(_Py_hashtable_t *ht) void _Py_hashtable_destroy(_Py_hashtable_t *ht) { - for (size_t i = 0; i < ht->num_buckets; i++) { + for (size_t i = 0; i < ht->nbuckets; i++) { _Py_hashtable_entry_t *entry = TABLE_HEAD(ht, i); while (entry) { _Py_hashtable_entry_t *entry_next = ENTRY_NEXT(entry); diff --git a/Python/marshal.c b/Python/marshal.c index b096ff8932220..a0f6b9812601b 100644 --- a/Python/marshal.c +++ b/Python/marshal.c @@ -312,7 +312,7 @@ w_ref(PyObject *v, char *flag, WFILE *p) w_long(w, p); return 1; } else { - size_t s = p->hashtable->entries; + size_t s = p->hashtable->nentries; /* we don't support long indices */ if (s >= 0x7fffffff) { PyErr_SetString(PyExc_ValueError, "too many objects"); From webhook-mailer at python.org Thu May 14 16:11:55 2020 From: webhook-mailer at 
python.org (Pablo Galindo) Date: Thu, 14 May 2020 20:11:55 -0000 Subject: [Python-checkins] bpo-40619: Correctly handle error lines in programs without file mode (GH-20090) Message-ID: https://github.com/python/cpython/commit/bcc30360951a303aa72b0502b77aad2c5f09f30d commit: bcc30360951a303aa72b0502b77aad2c5f09f30d branch: master author: Pablo Galindo committer: GitHub date: 2020-05-14T21:11:48+01:00 summary: bpo-40619: Correctly handle error lines in programs without file mode (GH-20090) files: M Lib/test/test_exceptions.py M Parser/pegen/pegen.c diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py index b689ec7aed18d..efd77fdbaabe1 100644 --- a/Lib/test/test_exceptions.py +++ b/Lib/test/test_exceptions.py @@ -228,6 +228,8 @@ def bar(): def baz(): '''quux''' """, 9, 20) + check("pass\npass\npass\n(1+)\npass\npass\npass", 4, 4) + check("(1+)", 1, 4) # Errors thrown by symtable.c check('x = [(yield i) for i in range(3)]', 1, 5) diff --git a/Parser/pegen/pegen.c b/Parser/pegen/pegen.c index 083088bd9657b..8b79a7364758e 100644 --- a/Parser/pegen/pegen.c +++ b/Parser/pegen/pegen.c @@ -300,30 +300,6 @@ raise_tokenizer_init_error(PyObject *filename) Py_XDECREF(tuple); } -static inline PyObject * -get_error_line(char *buffer, int is_file) -{ - const char *newline; - if (is_file) { - newline = strrchr(buffer, '\n'); - } else { - newline = strchr(buffer, '\n'); - } - - if (is_file) { - while (newline > buffer && newline[-1] == '\n') { - --newline; - } - } - - if (newline) { - return PyUnicode_DecodeUTF8(buffer, newline - buffer, "replace"); - } - else { - return PyUnicode_DecodeUTF8(buffer, strlen(buffer), "replace"); - } -} - static int tokenizer_error(Parser *p) { @@ -422,7 +398,11 @@ _PyPegen_raise_error_known_location(Parser *p, PyObject *errtype, } if (!error_line) { - error_line = get_error_line(p->tok->buf, p->start_rule == Py_file_input); + Py_ssize_t size = p->tok->inp - p->tok->buf; + if (size && p->tok->buf[size-1] == '\n') { + size--; + } + 
error_line = PyUnicode_DecodeUTF8(p->tok->buf, size, "replace"); if (!error_line) { goto error; } From webhook-mailer at python.org Thu May 14 16:13:55 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Thu, 14 May 2020 20:13:55 -0000 Subject: [Python-checkins] bpo-40618: Disallow invalid targets in augassign and except clauses (GH-20083) Message-ID: https://github.com/python/cpython/commit/ce21cfca7bb2d18921bc4ac27cb064726996c519 commit: ce21cfca7bb2d18921bc4ac27cb064726996c519 branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-05-14T21:13:50+01:00 summary: bpo-40618: Disallow invalid targets in augassign and except clauses (GH-20083) This commit fixes the new parser to disallow invalid targets in the following scenarios: - Augmented assignments must only accept a single target (Name, Attribute or Subscript), but no tuples or lists. - `except` clauses should only accept a single `Name` as a target. Co-authored-by: Pablo Galindo files: M Grammar/python.gram M Lib/test/test_grammar.py M Lib/test/test_peg_parser.py M Lib/test/test_syntax.py M Parser/pegen/parse.c diff --git a/Grammar/python.gram b/Grammar/python.gram index 84c89330e3ee9..9087c7aa718b1 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -89,12 +89,12 @@ assignment[stmt_ty]: "Variable annotation syntax is", _Py_AnnAssign(CHECK(_PyPegen_set_expr_context(p, a, Store)), b, c, 1, EXTRA) ) } - | a=('(' b=inside_paren_ann_assign_target ')' { b } - | ann_assign_subscript_attribute_target) ':' b=expression c=['=' d=annotated_rhs { d }] { + | a=('(' b=single_target ')' { b } + | single_subscript_attribute_target) ':' b=expression c=['=' d=annotated_rhs { d }] { CHECK_VERSION(6, "Variable annotations syntax is", _Py_AnnAssign(a, b, c, 0, EXTRA)) } | a=(z=star_targets '=' { z })+ b=(yield_expr | star_expressions) tc=[TYPE_COMMENT] { _Py_Assign(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA) } - | a=target b=augassign c=(yield_expr | star_expressions) { + | a=single_target 
b=augassign c=(yield_expr | star_expressions) { _Py_AugAssign(a, b->kind, c, EXTRA) } | invalid_assignment @@ -185,7 +185,7 @@ try_stmt[stmt_ty]: | 'try' ':' b=block f=finally_block { _Py_Try(b, NULL, NULL, f, EXTRA) } | 'try' ':' b=block ex=except_block+ el=[else_block] f=[finally_block] { _Py_Try(b, ex, el, f, EXTRA) } except_block[excepthandler_ty]: - | 'except' e=expression t=['as' z=target { z }] ':' b=block { + | 'except' e=expression t=['as' z=NAME { z }] ':' b=block { _Py_ExceptHandler(e, (t) ? ((expr_ty) t)->v.Name.id : NULL, b, EXTRA) } | 'except' ':' b=block { _Py_ExceptHandler(NULL, NULL, b, EXTRA) } finally_block[asdl_seq*]: 'finally' ':' a=block { a } @@ -573,12 +573,11 @@ star_atom[expr_ty]: | '(' a=[star_targets_seq] ')' { _Py_Tuple(a, Store, EXTRA) } | '[' a=[star_targets_seq] ']' { _Py_List(a, Store, EXTRA) } -inside_paren_ann_assign_target[expr_ty]: - | ann_assign_subscript_attribute_target +single_target[expr_ty]: + | single_subscript_attribute_target | a=NAME { _PyPegen_set_expr_context(p, a, Store) } - | '(' a=inside_paren_ann_assign_target ')' { a } - -ann_assign_subscript_attribute_target[expr_ty]: + | '(' a=single_target ')' { a } +single_subscript_attribute_target[expr_ty]: | a=t_primary '.' 
b=NAME !t_lookahead { _Py_Attribute(a, b->v.Name.id, Store, EXTRA) } | a=t_primary '[' b=slices ']' !t_lookahead { _Py_Subscript(a, b, Store, EXTRA) } diff --git a/Lib/test/test_grammar.py b/Lib/test/test_grammar.py index 02ba8a8b1579a..e1a402e2b463b 100644 --- a/Lib/test/test_grammar.py +++ b/Lib/test/test_grammar.py @@ -1279,7 +1279,7 @@ def __getitem__(self, i): def test_try(self): ### try_stmt: 'try' ':' suite (except_clause ':' suite)+ ['else' ':' suite] ### | 'try' ':' suite 'finally' ':' suite - ### except_clause: 'except' [expr ['as' expr]] + ### except_clause: 'except' [expr ['as' NAME]] try: 1/0 except ZeroDivisionError: @@ -1297,6 +1297,9 @@ def test_try(self): except (EOFError, TypeError, ZeroDivisionError) as msg: pass try: pass finally: pass + with self.assertRaises(SyntaxError): + compile("try:\n pass\nexcept Exception as a.b:\n pass", "?", "exec") + compile("try:\n pass\nexcept Exception as a[b]:\n pass", "?", "exec") def test_suite(self): # simple_stmt | NEWLINE INDENT NEWLINE* (stmt NEWLINE*)+ DEDENT diff --git a/Lib/test/test_peg_parser.py b/Lib/test/test_peg_parser.py index df2d46d8827f0..71e071940de2f 100644 --- a/Lib/test/test_peg_parser.py +++ b/Lib/test/test_peg_parser.py @@ -35,6 +35,9 @@ ('attribute_simple', 'a.b'), ('attributes_subscript', 'a.b[0]'), ('augmented_assignment', 'x += 42'), + ('augmented_assignment_attribute', 'a.b.c += 42'), + ('augmented_assignment_paren', '(x) += 42'), + ('augmented_assignment_paren_subscript', '(x[0]) -= 42'), ('binop_add', '1 + 1'), ('binop_add_multiple', '1 + 1 + 1 + 1'), ('binop_all', '1 + 2 * 5 + 3 ** 2 - -3'), @@ -547,6 +550,11 @@ def f(*a, b): with a as (x, y): pass '''), + ('with_list_target', + ''' + with a as [x, y]: + pass + '''), ('yield', 'yield'), ('yield_expr', 'yield a'), ('yield_from', 'yield from a'), @@ -560,6 +568,9 @@ def f(*a, b): ("annotation_tuple", "(a,): int"), ("annotation_tuple_without_paren", "a,: int"), ("assignment_keyword", "a = if"), + ("augmented_assignment_list", "[a, b] 
+= 1"), + ("augmented_assignment_tuple", "a, b += 1"), + ("augmented_assignment_tuple_paren", "(a, b) += (1, 2)"), ("comprehension_lambda", "(a for a in lambda: b)"), ("comprehension_else", "(a for a in b if c else d"), ("del_call", "del a()"), @@ -589,6 +600,20 @@ def f(): a """), ("not_terminated_string", "a = 'example"), + ("try_except_attribute_target", + """ + try: + pass + except Exception as a.b: + pass + """), + ("try_except_subscript_target", + """ + try: + pass + except Exception as a[0]: + pass + """), ] FAIL_SPECIALIZED_MESSAGE_CASES = [ diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py index 06636ae8a149a..a3a101534628a 100644 --- a/Lib/test/test_syntax.py +++ b/Lib/test/test_syntax.py @@ -129,6 +129,18 @@ Traceback (most recent call last): SyntaxError: cannot assign to conditional expression +>>> a, b += 1, 2 +Traceback (most recent call last): +SyntaxError: invalid syntax + +>>> (a, b) += 1, 2 +Traceback (most recent call last): +SyntaxError: cannot assign to tuple + +>>> [a, b] += 1, 2 +Traceback (most recent call last): +SyntaxError: cannot assign to list + From compiler_complex_args(): >>> def f(None=1): diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index b1b248187ea3e..851d17226d162 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -199,8 +199,8 @@ static KeywordToken *reserved_keywords[] = { #define star_targets_seq_type 1128 #define star_target_type 1129 #define star_atom_type 1130 -#define inside_paren_ann_assign_target_type 1131 -#define ann_assign_subscript_attribute_target_type 1132 +#define single_target_type 1131 +#define single_subscript_attribute_target_type 1132 #define del_targets_type 1133 #define del_target_type 1134 #define del_t_atom_type 1135 @@ -501,8 +501,8 @@ static expr_ty star_targets_rule(Parser *p); static asdl_seq* star_targets_seq_rule(Parser *p); static expr_ty star_target_rule(Parser *p); static expr_ty star_atom_rule(Parser *p); -static expr_ty 
inside_paren_ann_assign_target_rule(Parser *p); -static expr_ty ann_assign_subscript_attribute_target_rule(Parser *p); +static expr_ty single_target_rule(Parser *p); +static expr_ty single_subscript_attribute_target_rule(Parser *p); static asdl_seq* del_targets_rule(Parser *p); static expr_ty del_target_rule(Parser *p); static expr_ty del_t_atom_rule(Parser *p); @@ -1590,9 +1590,9 @@ compound_stmt_rule(Parser *p) // assignment: // | NAME ':' expression ['=' annotated_rhs] -// | ('(' inside_paren_ann_assign_target ')' | ann_assign_subscript_attribute_target) ':' expression ['=' annotated_rhs] +// | ('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs] // | ((star_targets '='))+ (yield_expr | star_expressions) TYPE_COMMENT? -// | target augassign (yield_expr | star_expressions) +// | single_target augassign (yield_expr | star_expressions) // | invalid_assignment static stmt_ty assignment_rule(Parser *p) @@ -1642,13 +1642,13 @@ assignment_rule(Parser *p) } p->mark = _mark; } - { // ('(' inside_paren_ann_assign_target ')' | ann_assign_subscript_attribute_target) ':' expression ['=' annotated_rhs] + { // ('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs] Token * _literal; void *a; expr_ty b; void *c; if ( - (a = _tmp_20_rule(p)) // '(' inside_paren_ann_assign_target ')' | ann_assign_subscript_attribute_target + (a = _tmp_20_rule(p)) // '(' single_target ')' | single_subscript_attribute_target && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -1703,12 +1703,12 @@ assignment_rule(Parser *p) } p->mark = _mark; } - { // target augassign (yield_expr | star_expressions) + { // single_target augassign (yield_expr | star_expressions) expr_ty a; AugOperator* b; void *c; if ( - (a = target_rule(p)) // target + (a = single_target_rule(p)) // single_target && (b = augassign_rule(p)) // augassign && @@ -3350,7 +3350,7 @@ try_stmt_rule(Parser *p) return _res; } -// except_block: 
'except' expression ['as' target] ':' block | 'except' ':' block +// except_block: 'except' expression ['as' NAME] ':' block | 'except' ':' block static excepthandler_ty except_block_rule(Parser *p) { @@ -3367,7 +3367,7 @@ except_block_rule(Parser *p) UNUSED(_start_lineno); // Only used by EXTRA macro int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro - { // 'except' expression ['as' target] ':' block + { // 'except' expression ['as' NAME] ':' block Token * _keyword; Token * _literal; asdl_seq* b; @@ -3378,7 +3378,7 @@ except_block_rule(Parser *p) && (e = expression_rule(p)) // expression && - (t = _tmp_48_rule(p), 1) // ['as' target] + (t = _tmp_48_rule(p), 1) // ['as' NAME] && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -9605,25 +9605,22 @@ star_atom_rule(Parser *p) return _res; } -// inside_paren_ann_assign_target: -// | ann_assign_subscript_attribute_target -// | NAME -// | '(' inside_paren_ann_assign_target ')' +// single_target: single_subscript_attribute_target | NAME | '(' single_target ')' static expr_ty -inside_paren_ann_assign_target_rule(Parser *p) +single_target_rule(Parser *p) { if (p->error_indicator) { return NULL; } expr_ty _res = NULL; int _mark = p->mark; - { // ann_assign_subscript_attribute_target - expr_ty ann_assign_subscript_attribute_target_var; + { // single_subscript_attribute_target + expr_ty single_subscript_attribute_target_var; if ( - (ann_assign_subscript_attribute_target_var = ann_assign_subscript_attribute_target_rule(p)) // ann_assign_subscript_attribute_target + (single_subscript_attribute_target_var = single_subscript_attribute_target_rule(p)) // single_subscript_attribute_target ) { - _res = ann_assign_subscript_attribute_target_var; + _res = single_subscript_attribute_target_var; goto done; } p->mark = _mark; @@ -9643,14 +9640,14 @@ inside_paren_ann_assign_target_rule(Parser *p) } p->mark = _mark; } - { // '(' inside_paren_ann_assign_target ')' + { // 
'(' single_target ')' Token * _literal; Token * _literal_1; expr_ty a; if ( (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = inside_paren_ann_assign_target_rule(p)) // inside_paren_ann_assign_target + (a = single_target_rule(p)) // single_target && (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) @@ -9669,11 +9666,11 @@ inside_paren_ann_assign_target_rule(Parser *p) return _res; } -// ann_assign_subscript_attribute_target: +// single_subscript_attribute_target: // | t_primary '.' NAME !t_lookahead // | t_primary '[' slices ']' !t_lookahead static expr_ty -ann_assign_subscript_attribute_target_rule(Parser *p) +single_subscript_attribute_target_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -11907,7 +11904,7 @@ _tmp_19_rule(Parser *p) return _res; } -// _tmp_20: '(' inside_paren_ann_assign_target ')' | ann_assign_subscript_attribute_target +// _tmp_20: '(' single_target ')' | single_subscript_attribute_target static void * _tmp_20_rule(Parser *p) { @@ -11916,14 +11913,14 @@ _tmp_20_rule(Parser *p) } void * _res = NULL; int _mark = p->mark; - { // '(' inside_paren_ann_assign_target ')' + { // '(' single_target ')' Token * _literal; Token * _literal_1; expr_ty b; if ( (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (b = inside_paren_ann_assign_target_rule(p)) // inside_paren_ann_assign_target + (b = single_target_rule(p)) // single_target && (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) @@ -11937,13 +11934,13 @@ _tmp_20_rule(Parser *p) } p->mark = _mark; } - { // ann_assign_subscript_attribute_target - expr_ty ann_assign_subscript_attribute_target_var; + { // single_subscript_attribute_target + expr_ty single_subscript_attribute_target_var; if ( - (ann_assign_subscript_attribute_target_var = ann_assign_subscript_attribute_target_rule(p)) // ann_assign_subscript_attribute_target + (single_subscript_attribute_target_var = single_subscript_attribute_target_rule(p)) // single_subscript_attribute_target ) { - _res 
= ann_assign_subscript_attribute_target_var; + _res = single_subscript_attribute_target_var; goto done; } p->mark = _mark; @@ -13073,7 +13070,7 @@ _loop1_47_rule(Parser *p) return _seq; } -// _tmp_48: 'as' target +// _tmp_48: 'as' NAME static void * _tmp_48_rule(Parser *p) { @@ -13082,13 +13079,13 @@ _tmp_48_rule(Parser *p) } void * _res = NULL; int _mark = p->mark; - { // 'as' target + { // 'as' NAME Token * _keyword; expr_ty z; if ( (_keyword = _PyPegen_expect_token(p, 531)) // token='as' && - (z = target_rule(p)) // target + (z = _PyPegen_name_token(p)) // NAME ) { _res = z; From webhook-mailer at python.org Thu May 14 16:44:39 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 14 May 2020 20:44:39 -0000 Subject: [Python-checkins] bpo-40602: _Py_hashtable_set() reports rehash failure (GH-20077) Message-ID: https://github.com/python/cpython/commit/d2dc827d16479d99927a6923a0347199d7c694fb commit: d2dc827d16479d99927a6923a0347199d7c694fb branch: master author: Victor Stinner committer: GitHub date: 2020-05-14T22:44:32+02:00 summary: bpo-40602: _Py_hashtable_set() reports rehash failure (GH-20077) If _Py_hashtable_set() fails to grow the hash table (rehash), it now fails rather than ignoring the error. 
files: M Modules/_testinternalcapi.c M Python/hashtable.c diff --git a/Modules/_testinternalcapi.c b/Modules/_testinternalcapi.c index 3ae387d945d76..5f217dcb8978e 100644 --- a/Modules/_testinternalcapi.c +++ b/Modules/_testinternalcapi.c @@ -98,6 +98,11 @@ test_hashtable(PyObject *self, PyObject *Py_UNUSED(args)) return PyErr_NoMemory(); } + // Using an newly allocated table must not crash + assert(table->nentries == 0); + assert(table->nbuckets > 0); + assert(_Py_hashtable_get(table, TO_PTR('x')) == NULL); + // Test _Py_hashtable_set() char key; for (key='a'; key <= 'z'; key++) { @@ -121,17 +126,15 @@ test_hashtable(PyObject *self, PyObject *Py_UNUSED(args)) // Test _Py_hashtable_get() for (key='a'; key <= 'z'; key++) { void *value_ptr = _Py_hashtable_get(table, TO_PTR(key)); - int value = (int)FROM_PTR(value_ptr); - assert(value == VALUE(key)); + assert((int)FROM_PTR(value_ptr) == VALUE(key)); } // Test _Py_hashtable_steal() key = 'p'; void *value_ptr = _Py_hashtable_steal(table, TO_PTR(key)); - int value = (int)FROM_PTR(value_ptr); - assert(value == VALUE(key)); - + assert((int)FROM_PTR(value_ptr) == VALUE(key)); assert(table->nentries == 25); + assert(_Py_hashtable_get_entry(table, TO_PTR(key)) == NULL); // Test _Py_hashtable_foreach() int count = 0; @@ -142,6 +145,7 @@ test_hashtable(PyObject *self, PyObject *Py_UNUSED(args)) // Test _Py_hashtable_clear() _Py_hashtable_clear(table); assert(table->nentries == 0); + assert(table->nbuckets > 0); assert(_Py_hashtable_get(table, TO_PTR('x')) == NULL); _Py_hashtable_destroy(table); diff --git a/Python/hashtable.c b/Python/hashtable.c index 45c52859ac2d6..b92e8ca08c7e1 100644 --- a/Python/hashtable.c +++ b/Python/hashtable.c @@ -60,7 +60,7 @@ ((_Py_hashtable_entry_t *)_Py_SLIST_ITEM_NEXT(ENTRY)) /* Forward declaration */ -static void hashtable_rehash(_Py_hashtable_t *ht); +static int hashtable_rehash(_Py_hashtable_t *ht); static void _Py_slist_init(_Py_slist_t *list) @@ -198,6 +198,7 @@ 
_Py_hashtable_steal(_Py_hashtable_t *ht, const void *key) ht->alloc.free(entry); if ((float)ht->nentries / (float)ht->nbuckets < HASHTABLE_LOW) { + // Ignore failure: error cannot be reported to the caller hashtable_rehash(ht); } return value; @@ -228,13 +229,17 @@ _Py_hashtable_set(_Py_hashtable_t *ht, const void *key, void *value) entry->key = (void *)key; entry->value = value; - size_t index = entry->key_hash & (ht->nbuckets - 1); - _Py_slist_prepend(&ht->buckets[index], (_Py_slist_item_t*)entry); ht->nentries++; - if ((float)ht->nentries / (float)ht->nbuckets > HASHTABLE_HIGH) { - hashtable_rehash(ht); + if (hashtable_rehash(ht) < 0) { + ht->nentries--; + ht->alloc.free(entry); + return -1; + } } + + size_t index = entry->key_hash & (ht->nbuckets - 1); + _Py_slist_prepend(&ht->buckets[index], (_Py_slist_item_t*)entry); return 0; } @@ -271,19 +276,19 @@ _Py_hashtable_foreach(_Py_hashtable_t *ht, } -static void +static int hashtable_rehash(_Py_hashtable_t *ht) { size_t new_size = round_size((size_t)(ht->nentries * HASHTABLE_REHASH_FACTOR)); if (new_size == ht->nbuckets) { - return; + return 0; } size_t buckets_size = new_size * sizeof(ht->buckets[0]); _Py_slist_t *new_buckets = ht->alloc.malloc(buckets_size); if (new_buckets == NULL) { /* memory allocation failed */ - return; + return -1; } memset(new_buckets, 0, buckets_size); @@ -303,6 +308,7 @@ hashtable_rehash(_Py_hashtable_t *ht) ht->alloc.free(ht->buckets); ht->nbuckets = new_size; ht->buckets = new_buckets; + return 0; } @@ -388,7 +394,9 @@ _Py_hashtable_clear(_Py_hashtable_t *ht) _Py_slist_init(&ht->buckets[i]); } ht->nentries = 0; - hashtable_rehash(ht); + // Ignore failure: clear function is not expected to fail + // because of a memory allocation failure. 
+ (void)hashtable_rehash(ht); } From webhook-mailer at python.org Thu May 14 18:51:59 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 14 May 2020 22:51:59 -0000 Subject: [Python-checkins] bpo-40460: Fix typo in idlelib/zzdummy.py (GH-20093) Message-ID: https://github.com/python/cpython/commit/edf2643bbb9859403239fe1cb3c212b1a2a8e65c commit: edf2643bbb9859403239fe1cb3c212b1a2a8e65c branch: master author: Victor Stinner committer: GitHub date: 2020-05-15T00:51:51+02:00 summary: bpo-40460: Fix typo in idlelib/zzdummy.py (GH-20093) Replace ztest with ztext. files: M Lib/idlelib/zzdummy.py diff --git a/Lib/idlelib/zzdummy.py b/Lib/idlelib/zzdummy.py index 8084499646653..3c4b1d23b0d37 100644 --- a/Lib/idlelib/zzdummy.py +++ b/Lib/idlelib/zzdummy.py @@ -28,7 +28,7 @@ def z_in_event(self, event): text = self.text text.undo_block_start() for line in range(1, text.index('end')): - text.insert('%d.0', ztest) + text.insert('%d.0', ztext) text.undo_block_stop() return "break" From webhook-mailer at python.org Thu May 14 19:02:18 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 14 May 2020 23:02:18 -0000 Subject: [Python-checkins] bpo-40462: Fix typo in test_json (GH-20094) Message-ID: https://github.com/python/cpython/commit/4b972faf605912092013a1fdbf486c498d002926 commit: 4b972faf605912092013a1fdbf486c498d002926 branch: master author: Victor Stinner committer: GitHub date: 2020-05-15T01:02:10+02:00 summary: bpo-40462: Fix typo in test_json (GH-20094) files: M Lib/test/test_json/test_recursion.py diff --git a/Lib/test/test_json/test_recursion.py b/Lib/test/test_json/test_recursion.py index 877dc448b14c1..543c62839b2cd 100644 --- a/Lib/test/test_json/test_recursion.py +++ b/Lib/test/test_json/test_recursion.py @@ -52,7 +52,7 @@ def default(self, o): return [JSONTestObject] else: return 'JSONTestObject' - return pyjson.JSONEncoder.default(o) + return self.json.JSONEncoder.default(o) enc = RecursiveJSONEncoder() 
self.assertEqual(enc.encode(JSONTestObject), '"JSONTestObject"') From webhook-mailer at python.org Thu May 14 20:59:54 2020 From: webhook-mailer at python.org (Joannah Nanjekye) Date: Fri, 15 May 2020 00:59:54 -0000 Subject: [Python-checkins] bpo-38872: Document exec symbol for codeop.compile_command (GH-20047) Message-ID: https://github.com/python/cpython/commit/7ba1f75f3f02b4b50ac6d7e17d15e467afa36aac commit: 7ba1f75f3f02b4b50ac6d7e17d15e467afa36aac branch: master author: Joannah Nanjekye <33177550+nanjekyejoannah at users.noreply.github.com> committer: GitHub date: 2020-05-14T21:59:46-03:00 summary: bpo-38872: Document exec symbol for codeop.compile_command (GH-20047) * Document exec symbol for codeop.compile_command * Remove extra statements Co-authored-by: nanjekyejoannah files: M Doc/library/code.rst M Doc/library/codeop.rst M Lib/codeop.py diff --git a/Doc/library/code.rst b/Doc/library/code.rst index 6708079f778c1..538e5afc7822a 100644 --- a/Doc/library/code.rst +++ b/Doc/library/code.rst @@ -56,8 +56,8 @@ build applications which provide an interactive interpreter prompt. *source* is the source string; *filename* is the optional filename from which source was read, defaulting to ``''``; and *symbol* is the optional - grammar start symbol, which should be either ``'single'`` (the default) or - ``'eval'``. + grammar start symbol, which should be ``'single'`` (the default), ``'eval'`` + or ``'exec'``. Returns a code object (the same as ``compile(source, filename, symbol)``) if the command is complete and valid; ``None`` if the command is incomplete; raises diff --git a/Doc/library/codeop.rst b/Doc/library/codeop.rst index a52d2c62c4fea..c66b9d3ec0a26 100644 --- a/Doc/library/codeop.rst +++ b/Doc/library/codeop.rst @@ -43,8 +43,9 @@ To do just the former: :exc:`OverflowError` or :exc:`ValueError` if there is an invalid literal. 
The *symbol* argument determines whether *source* is compiled as a statement - (``'single'``, the default) or as an :term:`expression` (``'eval'``). Any - other value will cause :exc:`ValueError` to be raised. + (``'single'``, the default), as a sequence of statements (``'exec'``) or + as an :term:`expression` (``'eval'``). Any other value will + cause :exc:`ValueError` to be raised. .. note:: diff --git a/Lib/codeop.py b/Lib/codeop.py index 082285f94fe84..835e68c09ba27 100644 --- a/Lib/codeop.py +++ b/Lib/codeop.py @@ -112,7 +112,8 @@ def compile_command(source, filename="", symbol="single"): source -- the source string; may contain \n characters filename -- optional filename from which source was read; default "" - symbol -- optional grammar start symbol; "single" (default) or "eval" + symbol -- optional grammar start symbol; "single" (default), "exec" + or "eval" Return value / exceptions raised: From webhook-mailer at python.org Thu May 14 21:04:58 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Fri, 15 May 2020 01:04:58 -0000 Subject: [Python-checkins] bpo-40334: Correctly identify invalid target in assignment errors (GH-20076) Message-ID: https://github.com/python/cpython/commit/16ab07063cb564c1937714bd39d6915172f005b5 commit: 16ab07063cb564c1937714bd39d6915172f005b5 branch: master author: Pablo Galindo committer: GitHub date: 2020-05-15T02:04:52+01:00 summary: bpo-40334: Correctly identify invalid target in assignment errors (GH-20076) Co-authored-by: Lysandros Nikolaou files: M Grammar/python.gram M Lib/test/test_dictcomps.py M Lib/test/test_generators.py M Lib/test/test_genexps.py M Lib/test/test_peg_parser.py M Lib/test/test_syntax.py M Parser/pegen/parse.c M Parser/pegen/pegen.c M Parser/pegen/pegen.h M Python/ast.c diff --git a/Grammar/python.gram b/Grammar/python.gram index 9087c7aa718b1..cca9209054626 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -640,8 +640,17 @@ invalid_assignment: RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, 
"only single target (not tuple) can be annotated") } | a=expression ':' expression ['=' annotated_rhs] { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "illegal target for annotation") } - | a=expression ('=' | augassign) (yield_expr | star_expressions) { - RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "cannot assign to %s", _PyPegen_get_expr_name(a)) } + | a=star_expressions '=' (yield_expr | star_expressions) { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION( + _PyPegen_get_invalid_target(a), + "cannot assign to %s", _PyPegen_get_expr_name(_PyPegen_get_invalid_target(a))) } + | a=star_expressions augassign (yield_expr | star_expressions) { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION( + a, + "'%s' is an illegal expression for augmented assignment", + _PyPegen_get_expr_name(a) + )} + invalid_block: | NEWLINE !INDENT { RAISE_INDENTATION_ERROR("expected an indented block") } invalid_comprehension: diff --git a/Lib/test/test_dictcomps.py b/Lib/test/test_dictcomps.py index 16aa651b93c46..472e3dfa0d8a0 100644 --- a/Lib/test/test_dictcomps.py +++ b/Lib/test/test_dictcomps.py @@ -77,7 +77,7 @@ def test_illegal_assignment(self): compile("{x: y for y, x in ((1, 2), (3, 4))} = 5", "", "exec") - with self.assertRaisesRegex(SyntaxError, "cannot assign"): + with self.assertRaisesRegex(SyntaxError, "illegal expression"): compile("{x: y for y, x in ((1, 2), (3, 4))} += 5", "", "exec") diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py index 1081107ee64ac..348ae15aa6532 100644 --- a/Lib/test/test_generators.py +++ b/Lib/test/test_generators.py @@ -1921,7 +1921,7 @@ def printsolution(self, x): >>> def f(): (yield bar) += y Traceback (most recent call last): ... 
-SyntaxError: cannot assign to yield expression +SyntaxError: 'yield expression' is an illegal expression for augmented assignment Now check some throw() conditions: diff --git a/Lib/test/test_genexps.py b/Lib/test/test_genexps.py index 86e4e195f55ec..5c1a209b0e990 100644 --- a/Lib/test/test_genexps.py +++ b/Lib/test/test_genexps.py @@ -158,7 +158,7 @@ >>> (y for y in (1,2)) += 10 Traceback (most recent call last): ... - SyntaxError: cannot assign to generator expression + SyntaxError: 'generator expression' is an illegal expression for augmented assignment ########### Tests borrowed from or inspired by test_generators.py ############ diff --git a/Lib/test/test_peg_parser.py b/Lib/test/test_peg_parser.py index 71e071940de2f..9614e45799dd8 100644 --- a/Lib/test/test_peg_parser.py +++ b/Lib/test/test_peg_parser.py @@ -625,7 +625,7 @@ def f(): ("(a, b): int", "only single target (not tuple) can be annotated"), ("[a, b]: int", "only single target (not list) can be annotated"), ("a(): int", "illegal target for annotation"), - ("1 += 1", "cannot assign to literal"), + ("1 += 1", "'literal' is an illegal expression for augmented assignment"), ("pass\n pass", "unexpected indent"), ("def f():\npass", "expected an indented block"), ("def f(*): pass", "named arguments must follow bare *"), diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py index a3a101534628a..60c7d9fd3868e 100644 --- a/Lib/test/test_syntax.py +++ b/Lib/test/test_syntax.py @@ -100,30 +100,37 @@ This test just checks a couple of cases rather than enumerating all of them. 
-# All of the following also produce different error messages with pegen -# >>> (a, "b", c) = (1, 2, 3) -# Traceback (most recent call last): -# SyntaxError: cannot assign to literal +>>> (a, "b", c) = (1, 2, 3) +Traceback (most recent call last): +SyntaxError: cannot assign to literal -# >>> (a, True, c) = (1, 2, 3) -# Traceback (most recent call last): -# SyntaxError: cannot assign to True +>>> (a, True, c) = (1, 2, 3) +Traceback (most recent call last): +SyntaxError: cannot assign to True >>> (a, __debug__, c) = (1, 2, 3) Traceback (most recent call last): SyntaxError: cannot assign to __debug__ -# >>> (a, *True, c) = (1, 2, 3) -# Traceback (most recent call last): -# SyntaxError: cannot assign to True +>>> (a, *True, c) = (1, 2, 3) +Traceback (most recent call last): +SyntaxError: cannot assign to True >>> (a, *__debug__, c) = (1, 2, 3) Traceback (most recent call last): SyntaxError: cannot assign to __debug__ -# >>> [a, b, c + 1] = [1, 2, 3] -# Traceback (most recent call last): -# SyntaxError: cannot assign to operator +>>> [a, b, c + 1] = [1, 2, 3] +Traceback (most recent call last): +SyntaxError: cannot assign to operator + +>>> [a, b[1], c + 1] = [1, 2, 3] +Traceback (most recent call last): +SyntaxError: cannot assign to operator + +>>> [a, b.c.d, c + 1] = [1, 2, 3] +Traceback (most recent call last): +SyntaxError: cannot assign to operator >>> a if 1 else b = 1 Traceback (most recent call last): @@ -131,15 +138,15 @@ >>> a, b += 1, 2 Traceback (most recent call last): -SyntaxError: invalid syntax +SyntaxError: 'tuple' is an illegal expression for augmented assignment >>> (a, b) += 1, 2 Traceback (most recent call last): -SyntaxError: cannot assign to tuple +SyntaxError: 'tuple' is an illegal expression for augmented assignment >>> [a, b] += 1, 2 Traceback (most recent call last): -SyntaxError: cannot assign to list +SyntaxError: 'list' is an illegal expression for augmented assignment From compiler_complex_args(): @@ -346,16 +353,16 @@ >>> (x for x in x) 
+= 1 Traceback (most recent call last): -SyntaxError: cannot assign to generator expression +SyntaxError: 'generator expression' is an illegal expression for augmented assignment >>> None += 1 Traceback (most recent call last): -SyntaxError: cannot assign to None +SyntaxError: 'None' is an illegal expression for augmented assignment >>> __debug__ += 1 Traceback (most recent call last): SyntaxError: cannot assign to __debug__ >>> f() += 1 Traceback (most recent call last): -SyntaxError: cannot assign to function call +SyntaxError: 'function call' is an illegal expression for augmented assignment Test continue in finally in weird combinations. @@ -688,6 +695,7 @@ def _check_error(self, code, errtext, def test_assign_call(self): self._check_error("f() = 1", "assign") + @unittest.skipIf(support.use_old_parser(), "The old parser cannot generate these error messages") def test_assign_del(self): self._check_error("del (,)", "invalid syntax") self._check_error("del 1", "delete literal") diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index 851d17226d162..f4c5692212768 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -10747,7 +10747,8 @@ invalid_named_expression_rule(Parser *p) // | tuple ':' // | star_named_expression ',' star_named_expressions* ':' // | expression ':' expression ['=' annotated_rhs] -// | expression ('=' | augassign) (yield_expr | star_expressions) +// | star_expressions '=' (yield_expr | star_expressions) +// | star_expressions augassign (yield_expr | star_expressions) static void * invalid_assignment_rule(Parser *p) { @@ -10841,19 +10842,40 @@ invalid_assignment_rule(Parser *p) } p->mark = _mark; } - { // expression ('=' | augassign) (yield_expr | star_expressions) + { // star_expressions '=' (yield_expr | star_expressions) + Token * _literal; void *_tmp_128_var; + expr_ty a; + if ( + (a = star_expressions_rule(p)) // star_expressions + && + (_literal = _PyPegen_expect_token(p, 22)) // token='=' + && + (_tmp_128_var = 
_tmp_128_rule(p)) // yield_expr | star_expressions + ) + { + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( _PyPegen_get_invalid_target ( a ) , "cannot assign to %s" , _PyPegen_get_expr_name ( _PyPegen_get_invalid_target ( a ) ) ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + return NULL; + } + goto done; + } + p->mark = _mark; + } + { // star_expressions augassign (yield_expr | star_expressions) void *_tmp_129_var; expr_ty a; + AugOperator* augassign_var; if ( - (a = expression_rule(p)) // expression + (a = star_expressions_rule(p)) // star_expressions && - (_tmp_128_var = _tmp_128_rule(p)) // '=' | augassign + (augassign_var = augassign_rule(p)) // augassign && (_tmp_129_var = _tmp_129_rule(p)) // yield_expr | star_expressions ) { - _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "cannot assign to %s" , _PyPegen_get_expr_name ( a ) ); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "'%s' is an illegal expression for augmented assignment" , _PyPegen_get_expr_name ( a ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; @@ -16675,7 +16697,7 @@ _tmp_127_rule(Parser *p) return _res; } -// _tmp_128: '=' | augassign +// _tmp_128: yield_expr | star_expressions static void * _tmp_128_rule(Parser *p) { @@ -16684,24 +16706,24 @@ _tmp_128_rule(Parser *p) } void * _res = NULL; int _mark = p->mark; - { // '=' - Token * _literal; + { // yield_expr + expr_ty yield_expr_var; if ( - (_literal = _PyPegen_expect_token(p, 22)) // token='=' + (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - _res = _literal; + _res = yield_expr_var; goto done; } p->mark = _mark; } - { // augassign - AugOperator* augassign_var; + { // star_expressions + expr_ty star_expressions_var; if ( - (augassign_var = augassign_rule(p)) // augassign + (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - _res = augassign_var; + _res = star_expressions_var; goto done; } p->mark = _mark; diff --git a/Parser/pegen/pegen.c 
b/Parser/pegen/pegen.c index 8b79a7364758e..7f3e4561de605 100644 --- a/Parser/pegen/pegen.c +++ b/Parser/pegen/pegen.c @@ -2054,3 +2054,49 @@ _PyPegen_make_module(Parser *p, asdl_seq *a) { } return Module(a, type_ignores, p->arena); } + +// Error reporting helpers + +expr_ty +_PyPegen_get_invalid_target(expr_ty e) +{ + if (e == NULL) { + return NULL; + } + +#define VISIT_CONTAINER(CONTAINER, TYPE) do { \ + Py_ssize_t len = asdl_seq_LEN(CONTAINER->v.TYPE.elts);\ + for (Py_ssize_t i = 0; i < len; i++) {\ + expr_ty other = asdl_seq_GET(CONTAINER->v.TYPE.elts, i);\ + expr_ty child = _PyPegen_get_invalid_target(other);\ + if (child != NULL) {\ + return child;\ + }\ + }\ + } while (0) + + // We only need to visit List and Tuple nodes recursively as those + // are the only ones that can contain valid names in targets when + // they are parsed as expressions. Any other kind of expression + // that is a container (like Sets or Dicts) is directly invalid and + // we don't need to visit it recursively. 
+ + switch (e->kind) { + case List_kind: { + VISIT_CONTAINER(e, List); + return NULL; + } + case Tuple_kind: { + VISIT_CONTAINER(e, Tuple); + return NULL; + } + case Starred_kind: + return _PyPegen_get_invalid_target(e->v.Starred.value); + case Name_kind: + case Subscript_kind: + case Attribute_kind: + return NULL; + default: + return e; + } +} \ No newline at end of file diff --git a/Parser/pegen/pegen.h b/Parser/pegen/pegen.h index e5b1b757bd894..b9d4c048bb52b 100644 --- a/Parser/pegen/pegen.h +++ b/Parser/pegen/pegen.h @@ -260,6 +260,10 @@ void *_PyPegen_arguments_parsing_error(Parser *, expr_ty); int _PyPegen_check_barry_as_flufl(Parser *); mod_ty _PyPegen_make_module(Parser *, asdl_seq *); +// Error reporting helpers + +expr_ty _PyPegen_get_invalid_target(expr_ty e); + void *_PyPegen_parse(Parser *); #endif diff --git a/Python/ast.c b/Python/ast.c index 1a4a3110e6955..2d20ca62aa837 100644 --- a/Python/ast.c +++ b/Python/ast.c @@ -3164,10 +3164,7 @@ ast_for_expr_stmt(struct compiling *c, const node *n) expr1 = ast_for_testlist(c, ch); if (!expr1) return NULL; - if(!set_context(c, expr1, Store, ch)) - return NULL; - /* set_context checks that most expressions are not the left side. - Augmented assignments can only have a name, a subscript, or an + /* Augmented assignments can only have a name, a subscript, or an attribute on the left, though, so we have to explicitly check for those. */ switch (expr1->kind) { @@ -3176,10 +3173,16 @@ ast_for_expr_stmt(struct compiling *c, const node *n) case Subscript_kind: break; default: - ast_error(c, ch, "illegal expression for augmented assignment"); + ast_error(c, ch, "'%s' is an illegal expression for augmented assignment", + get_expr_name(expr1)); return NULL; } + /* set_context checks that most expressions are not the left side. 
*/ + if(!set_context(c, expr1, Store, ch)) { + return NULL; + } + ch = CHILD(n, 2); if (TYPE(ch) == testlist) expr2 = ast_for_testlist(c, ch); From webhook-mailer at python.org Thu May 14 21:19:46 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 15 May 2020 01:19:46 -0000 Subject: [Python-checkins] bpo-38872: Document exec symbol for codeop.compile_command (GH-20047) (#20099) Message-ID: https://github.com/python/cpython/commit/c1203b75ffe429b28cb2e2480deb0d0b8d3a941c commit: c1203b75ffe429b28cb2e2480deb0d0b8d3a941c branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-14T22:19:38-03:00 summary: bpo-38872: Document exec symbol for codeop.compile_command (GH-20047) (#20099) * Document exec symbol for codeop.compile_command * Remove extra statements Co-authored-by: nanjekyejoannah (cherry picked from commit 7ba1f75f3f02b4b50ac6d7e17d15e467afa36aac) Co-authored-by: Joannah Nanjekye <33177550+nanjekyejoannah at users.noreply.github.com> Co-authored-by: Joannah Nanjekye <33177550+nanjekyejoannah at users.noreply.github.com> files: M Doc/library/code.rst M Doc/library/codeop.rst M Lib/codeop.py diff --git a/Doc/library/code.rst b/Doc/library/code.rst index 6708079f778c1..538e5afc7822a 100644 --- a/Doc/library/code.rst +++ b/Doc/library/code.rst @@ -56,8 +56,8 @@ build applications which provide an interactive interpreter prompt. *source* is the source string; *filename* is the optional filename from which source was read, defaulting to ``''``; and *symbol* is the optional - grammar start symbol, which should be either ``'single'`` (the default) or - ``'eval'``. + grammar start symbol, which should be ``'single'`` (the default), ``'eval'`` + or ``'exec'``. 
Returns a code object (the same as ``compile(source, filename, symbol)``) if the command is complete and valid; ``None`` if the command is incomplete; raises diff --git a/Doc/library/codeop.rst b/Doc/library/codeop.rst index a52d2c62c4fea..c66b9d3ec0a26 100644 --- a/Doc/library/codeop.rst +++ b/Doc/library/codeop.rst @@ -43,8 +43,9 @@ To do just the former: :exc:`OverflowError` or :exc:`ValueError` if there is an invalid literal. The *symbol* argument determines whether *source* is compiled as a statement - (``'single'``, the default) or as an :term:`expression` (``'eval'``). Any - other value will cause :exc:`ValueError` to be raised. + (``'single'``, the default), as a sequence of statements (``'exec'``) or + as an :term:`expression` (``'eval'``). Any other value will + cause :exc:`ValueError` to be raised. .. note:: diff --git a/Lib/codeop.py b/Lib/codeop.py index 0fa677f609b74..3c37f35eb0250 100644 --- a/Lib/codeop.py +++ b/Lib/codeop.py @@ -112,7 +112,8 @@ def compile_command(source, filename="", symbol="single"): source -- the source string; may contain \n characters filename -- optional filename from which source was read; default "" - symbol -- optional grammar start symbol; "single" (default) or "eval" + symbol -- optional grammar start symbol; "single" (default), "exec" + or "eval" Return value / exceptions raised: From webhook-mailer at python.org Thu May 14 21:38:25 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 15 May 2020 01:38:25 -0000 Subject: [Python-checkins] bpo-38872: Document exec symbol for codeop.compile_command (GH-20047) (GH-20098) Message-ID: https://github.com/python/cpython/commit/eb5ce324f724a59c51d7a76d1dd49b550cdf386b commit: eb5ce324f724a59c51d7a76d1dd49b550cdf386b branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-14T22:38:17-03:00 summary: bpo-38872: Document exec symbol for codeop.compile_command (GH-20047) (GH-20098) * Document 
exec symbol for codeop.compile_command * Remove extra statements Co-authored-by: nanjekyejoannah (cherry picked from commit 7ba1f75f3f02b4b50ac6d7e17d15e467afa36aac) Co-authored-by: Joannah Nanjekye <33177550+nanjekyejoannah at users.noreply.github.com> Co-authored-by: Joannah Nanjekye <33177550+nanjekyejoannah at users.noreply.github.com> files: M Doc/library/code.rst M Doc/library/codeop.rst M Lib/codeop.py diff --git a/Doc/library/code.rst b/Doc/library/code.rst index 6708079f778c1..538e5afc7822a 100644 --- a/Doc/library/code.rst +++ b/Doc/library/code.rst @@ -56,8 +56,8 @@ build applications which provide an interactive interpreter prompt. *source* is the source string; *filename* is the optional filename from which source was read, defaulting to ``''``; and *symbol* is the optional - grammar start symbol, which should be either ``'single'`` (the default) or - ``'eval'``. + grammar start symbol, which should be ``'single'`` (the default), ``'eval'`` + or ``'exec'``. Returns a code object (the same as ``compile(source, filename, symbol)``) if the command is complete and valid; ``None`` if the command is incomplete; raises diff --git a/Doc/library/codeop.rst b/Doc/library/codeop.rst index a52d2c62c4fea..c66b9d3ec0a26 100644 --- a/Doc/library/codeop.rst +++ b/Doc/library/codeop.rst @@ -43,8 +43,9 @@ To do just the former: :exc:`OverflowError` or :exc:`ValueError` if there is an invalid literal. The *symbol* argument determines whether *source* is compiled as a statement - (``'single'``, the default) or as an :term:`expression` (``'eval'``). Any - other value will cause :exc:`ValueError` to be raised. + (``'single'``, the default), as a sequence of statements (``'exec'``) or + as an :term:`expression` (``'eval'``). Any other value will + cause :exc:`ValueError` to be raised. .. 
note:: diff --git a/Lib/codeop.py b/Lib/codeop.py index 0fa677f609b74..3c37f35eb0250 100644 --- a/Lib/codeop.py +++ b/Lib/codeop.py @@ -112,7 +112,8 @@ def compile_command(source, filename="", symbol="single"): source -- the source string; may contain \n characters filename -- optional filename from which source was read; default "" - symbol -- optional grammar start symbol; "single" (default) or "eval" + symbol -- optional grammar start symbol; "single" (default), "exec" + or "eval" Return value / exceptions raised: From webhook-mailer at python.org Thu May 14 22:11:09 2020 From: webhook-mailer at python.org (Chris Jerdonek) Date: Fri, 15 May 2020 02:11:09 -0000 Subject: [Python-checkins] Update code comment re: location of struct _is. (GH-20067) Message-ID: https://github.com/python/cpython/commit/1aa8767baf498a920f0461d1088772a12dcb4d20 commit: 1aa8767baf498a920f0461d1088772a12dcb4d20 branch: master author: Chris Jerdonek committer: GitHub date: 2020-05-14T19:11:00-07:00 summary: Update code comment re: location of struct _is. 
(GH-20067) files: M Include/pystate.h diff --git a/Include/pystate.h b/Include/pystate.h index 34cad02c3a930..bae440778b261 100644 --- a/Include/pystate.h +++ b/Include/pystate.h @@ -18,7 +18,7 @@ struct _is; /* struct _ts is defined in cpython/pystate.h */ typedef struct _ts PyThreadState; -/* struct _is is defined in internal/pycore_pystate.h */ +/* struct _is is defined in internal/pycore_interp.h */ typedef struct _is PyInterpreterState; PyAPI_FUNC(PyInterpreterState *) PyInterpreterState_New(void); From webhook-mailer at python.org Thu May 14 22:22:56 2020 From: webhook-mailer at python.org (Guido van Rossum) Date: Fri, 15 May 2020 02:22:56 -0000 Subject: [Python-checkins] bpo-40612: Fix SyntaxError edge cases in traceback formatting (GH-20072) Message-ID: https://github.com/python/cpython/commit/15bc9ab301d73f20bff47a12ef05326feb40f797 commit: 15bc9ab301d73f20bff47a12ef05326feb40f797 branch: master author: Guido van Rossum committer: GitHub date: 2020-05-14T19:22:48-07:00 summary: bpo-40612: Fix SyntaxError edge cases in traceback formatting (GH-20072) This fixes both the traceback.py module and the C code for formatting syntax errors (in Python/pythonrun.c). They now both consistently do the following: - Suppress caret if it points left of text - Allow caret pointing just past end of line - If caret points past end of line, clip to *just* past end of line The syntax error formatting code in traceback.py was mostly rewritten; small, subtle changes were applied to the C code in pythonrun.c. There's still a difference when the text contains embedded newlines. Neither handles these very well, and I don't think the case occurs in practice. 
Automerge-Triggered-By: @gvanrossum files: A Misc/NEWS.d/next/Library/2020-05-13-10-23-29.bpo-40612.gOIreM.rst M Lib/test/test_cmd_line_script.py M Lib/test/test_traceback.py M Lib/traceback.py M Python/pythonrun.c diff --git a/Lib/test/test_cmd_line_script.py b/Lib/test/test_cmd_line_script.py index 171340581af22..15fca7b8a5191 100644 --- a/Lib/test/test_cmd_line_script.py +++ b/Lib/test/test_cmd_line_script.py @@ -633,7 +633,7 @@ def test_syntaxerror_multi_line_fstring(self): stderr.splitlines()[-3:], [ b' foo"""', - b' ^', + b' ^', b'SyntaxError: f-string: empty expression not allowed', ], ) diff --git a/Lib/test/test_traceback.py b/Lib/test/test_traceback.py index 7361d091cfbbe..f9a5f2fc53e1e 100644 --- a/Lib/test/test_traceback.py +++ b/Lib/test/test_traceback.py @@ -58,13 +58,13 @@ def test_caret(self): SyntaxError) self.assertIn("^", err[2]) # third line has caret self.assertEqual(err[2].count('\n'), 1) # and no additional newline - self.assertEqual(err[1].find("+"), err[2].find("^")) # in the right place + self.assertEqual(err[1].find("+") + 1, err[2].find("^")) # in the right place err = self.get_exception_format(self.syntax_error_with_caret_non_ascii, SyntaxError) self.assertIn("^", err[2]) # third line has caret self.assertEqual(err[2].count('\n'), 1) # and no additional newline - self.assertEqual(err[1].find("+"), err[2].find("^")) # in the right place + self.assertEqual(err[1].find("+") + 1, err[2].find("^")) # in the right place def test_nocaret(self): exc = SyntaxError("error", ("x.py", 23, None, "bad syntax")) @@ -78,14 +78,13 @@ def test_bad_indentation(self): self.assertEqual(len(err), 4) self.assertEqual(err[1].strip(), "print(2)") self.assertIn("^", err[2]) - self.assertEqual(err[1].find(")"), err[2].find("^")) + self.assertEqual(err[1].find(")") + 1, err[2].find("^")) + # No caret for "unexpected indent" err = self.get_exception_format(self.syntax_error_bad_indentation2, IndentationError) - self.assertEqual(len(err), 4) + 
self.assertEqual(len(err), 3) self.assertEqual(err[1].strip(), "print(2)") - self.assertIn("^", err[2]) - self.assertEqual(err[1].find("p"), err[2].find("^")) def test_base_exception(self): # Test that exceptions derived from BaseException are formatted right @@ -656,7 +655,7 @@ def outer_raise(): self.assertIn('inner_raise() # Marker', blocks[2]) self.check_zero_div(blocks[2]) - @support.skip_if_new_parser("Pegen is arguably better here, so no need to fix this") + @unittest.skipIf(support.use_old_parser(), "Pegen is arguably better here, so no need to fix this") def test_syntax_error_offset_at_eol(self): # See #10186. def e(): @@ -666,7 +665,7 @@ def e(): def e(): exec("x = 5 | 4 |") msg = self.get_report(e).splitlines() - self.assertEqual(msg[-2], ' ^') + self.assertEqual(msg[-2], ' ^') def test_message_none(self): # A message that looks like "None" should not be treated specially @@ -679,6 +678,25 @@ def test_message_none(self): err = self.get_report(Exception('')) self.assertIn('Exception\n', err) + def test_syntax_error_various_offsets(self): + for offset in range(-5, 10): + for add in [0, 2]: + text = " "*add + "text%d" % offset + expected = [' File "file.py", line 1'] + if offset < 1: + expected.append(" %s" % text.lstrip()) + elif offset <= 6: + expected.append(" %s" % text.lstrip()) + expected.append(" %s^" % (" "*(offset-1))) + else: + expected.append(" %s" % text.lstrip()) + expected.append(" %s^" % (" "*5)) + expected.append("SyntaxError: msg") + expected.append("") + err = self.get_report(SyntaxError("msg", ("file.py", 1, offset+add, text))) + exp = "\n".join(expected) + self.assertEqual(exp, err) + class PyExcReportingTests(BaseExceptionReportingTests, unittest.TestCase): # diff --git a/Lib/traceback.py b/Lib/traceback.py index bf34bbab8a162..a19e38718b120 100644 --- a/Lib/traceback.py +++ b/Lib/traceback.py @@ -569,23 +569,30 @@ def format_exception_only(self): if not issubclass(self.exc_type, SyntaxError): yield _format_final_exc_line(stype, 
self._str) - return + else: + yield from self._format_syntax_error(stype) - # It was a syntax error; show exactly where the problem was found. + def _format_syntax_error(self, stype): + """Format SyntaxError exceptions (internal helper).""" + # Show exactly where the problem was found. filename = self.filename or "" lineno = str(self.lineno) or '?' yield ' File "{}", line {}\n'.format(filename, lineno) - badline = self.text - offset = self.offset - if badline is not None: - yield ' {}\n'.format(badline.strip()) - if offset is not None: - caretspace = badline.rstrip('\n') - offset = min(len(caretspace), offset) - 1 - caretspace = caretspace[:offset].lstrip() + text = self.text + if text is not None: + # text = " foo\n" + # rtext = " foo" + # ltext = "foo" + rtext = text.rstrip('\n') + ltext = rtext.lstrip(' \n\f') + spaces = len(rtext) - len(ltext) + yield ' {}\n'.format(ltext) + # Convert 1-based column offset to 0-based index into stripped text + caret = (self.offset or 0) - 1 - spaces + if caret >= 0: # non-space whitespace (likes tabs) must be kept for alignment - caretspace = ((c.isspace() and c or ' ') for c in caretspace) + caretspace = ((c if c.isspace() else ' ') for c in ltext[:caret]) yield ' {}^\n'.format(''.join(caretspace)) msg = self.msg or "" yield "{}: {}\n".format(stype, msg) diff --git a/Misc/NEWS.d/next/Library/2020-05-13-10-23-29.bpo-40612.gOIreM.rst b/Misc/NEWS.d/next/Library/2020-05-13-10-23-29.bpo-40612.gOIreM.rst new file mode 100644 index 0000000000000..32cc8073d3f79 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-13-10-23-29.bpo-40612.gOIreM.rst @@ -0,0 +1,2 @@ +Fix edge cases in SyntaxError formatting. If the offset is <= 0, no caret is printed. +If the offset is > line length, the caret is printed pointing just after the last character. 
diff --git a/Python/pythonrun.c b/Python/pythonrun.c index 45f08b707eb99..160f44d38e2e1 100644 --- a/Python/pythonrun.c +++ b/Python/pythonrun.c @@ -554,37 +554,65 @@ parse_syntax_error(PyObject *err, PyObject **message, PyObject **filename, static void print_error_text(PyObject *f, int offset, PyObject *text_obj) { - const char *text; - const char *nl; - - text = PyUnicode_AsUTF8(text_obj); + /* Convert text to a char pointer; return if error */ + const char *text = PyUnicode_AsUTF8(text_obj); if (text == NULL) return; - if (offset >= 0) { - if (offset > 0 && (size_t)offset == strlen(text) && text[offset - 1] == '\n') - offset--; - for (;;) { - nl = strchr(text, '\n'); - if (nl == NULL || nl-text >= offset) - break; - offset -= (int)(nl+1-text); - text = nl+1; + /* Convert offset from 1-based to 0-based */ + offset--; + + /* Strip leading whitespace from text, adjusting offset as we go */ + while (*text == ' ' || *text == '\t' || *text == '\f') { + text++; + offset--; + } + + /* Calculate text length excluding trailing newline */ + Py_ssize_t len = strlen(text); + if (len > 0 && text[len-1] == '\n') { + len--; + } + + /* Clip offset to at most len */ + if (offset > len) { + offset = len; + } + + /* Skip past newlines embedded in text */ + for (;;) { + const char *nl = strchr(text, '\n'); + if (nl == NULL) { + break; } - while (*text == ' ' || *text == '\t' || *text == '\f') { - text++; - offset--; + Py_ssize_t inl = nl - text; + if (inl >= (Py_ssize_t)offset) { + break; } + inl += 1; + text += inl; + len -= inl; + offset -= (int)inl; } + + /* Print text */ PyFile_WriteString(" ", f); PyFile_WriteString(text, f); - if (*text == '\0' || text[strlen(text)-1] != '\n') + + /* Make sure there's a newline at the end */ + if (text[len] != '\n') { PyFile_WriteString("\n", f); - if (offset == -1) + } + + /* Don't print caret if it points to the left of the text */ + if (offset < 0) return; + + /* Write caret line */ PyFile_WriteString(" ", f); - while (--offset > 0) + while 
(--offset >= 0) { PyFile_WriteString(" ", f); + } PyFile_WriteString("^\n", f); } From webhook-mailer at python.org Thu May 14 22:25:50 2020 From: webhook-mailer at python.org (Chris Jerdonek) Date: Fri, 15 May 2020 02:25:50 -0000 Subject: [Python-checkins] Fix typo in code comment in main_loop label. (GH-20068) Message-ID: https://github.com/python/cpython/commit/4a12d121860fb60d56cdcc212817577cac2356d0 commit: 4a12d121860fb60d56cdcc212817577cac2356d0 branch: master author: Chris Jerdonek committer: GitHub date: 2020-05-14T19:25:45-07:00 summary: Fix typo in code comment in main_loop label. (GH-20068) files: M Python/ceval.c diff --git a/Python/ceval.c b/Python/ceval.c index 699ad86a365b1..43ea1c760b17e 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -1396,7 +1396,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, PyFrameObject *f, int throwflag) /* Do periodic things. Doing this every time through the loop would add too much overhead, so we do it only every Nth instruction. We also do it if - ``pendingcalls_to_do'' is set, i.e. when an asynchronous + ``pending.calls_to_do'' is set, i.e. when an asynchronous event needs attention (e.g. a signal handler or async I/O handler); see Py_AddPendingCall() and Py_MakePendingCalls() above. */ From webhook-mailer at python.org Thu May 14 23:27:56 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Fri, 15 May 2020 03:27:56 -0000 Subject: [Python-checkins] [3.8] bpo-40548: GitHub Action workflow: skip jobs on doc only PRs (GH-20100) Message-ID: https://github.com/python/cpython/commit/07bd5cf3d9551ae84100e6400836163fcd507f07 commit: 07bd5cf3d9551ae84100e6400836163fcd507f07 branch: 3.8 author: Victor Stinner committer: GitHub date: 2020-05-15T05:27:48+02:00 summary: [3.8] bpo-40548: GitHub Action workflow: skip jobs on doc only PRs (GH-20100) * bpo-40548: Always run GitHub action, even on doc PRs (GH-19981) Always run GitHub action jobs, even on documentation-only pull requests. 
So it will be possible to make a GitHub action job, like the Windows (64-bit) job, mandatory. (cherry picked from commit 4e363761fc02a89d53aba4382dc451293bd6f0ba) * bpo-40548: GitHub Action workflow: skip jobs on doc only PRs (GH-19983) Signed-off-by: Filipe La?ns (cherry picked from commit 75d7257b201a56f950c20cd9f5753a83fff4742b) * bpo-40548: github actions: pass the changes check on no source changes (GH-20097) Signed-off-by: Filipe La?ns (cherry picked from commit 6a78589b6b22878491a4b042bb8b3161e1d120f6) Co-authored-by: Filipe La?ns Co-authored-by: Filipe La?ns files: M .github/workflows/build.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index d3d67475135a1..27d7f15aa5931 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,27 +1,37 @@ name: Tests +# bpo-40548: "paths-ignore" is not used to skip documentation-only PRs, because +# it prevents to mark a job as mandatory. A PR cannot be merged if a job is +# mandatory but not scheduled because of "paths-ignore". on: - #push: - # branches: - # - master - # - 3.8 - # - 3.7 - # paths-ignore: - # - 'Doc/**' - # - 'Misc/**' pull_request: branches: - master - 3.8 - 3.7 - paths-ignore: - - 'Doc/**' - - 'Misc/**' jobs: + check_source: + name: 'Check for source changes' + runs-on: ubuntu-latest + outputs: + run_tests: ${{ steps.check.outputs.run_tests }} + steps: + - uses: actions/checkout at v2 + - name: Check for source changes + id: check + run: | + if [ -z "GITHUB_BASE_REF" ]; then + echo '::set-output name=run_tests::true' + else + git fetch origin $GITHUB_BASE_REF --depth=1 + git diff --name-only origin/$GITHUB_BASE_REF... 
| grep -qvE '(\.rst$|^Doc|^Misc)' && echo '::set-output name=run_tests::true' || true + fi build_win32: name: 'Windows (x86)' runs-on: windows-latest + needs: check_source + if: needs.check_source.outputs.run_tests == 'true' steps: - uses: actions/checkout at v1 - name: Build CPython @@ -34,6 +44,8 @@ jobs: build_win_amd64: name: 'Windows (x64)' runs-on: windows-latest + needs: check_source + if: needs.check_source.outputs.run_tests == 'true' steps: - uses: actions/checkout at v1 - name: Build CPython @@ -46,6 +58,8 @@ jobs: build_macos: name: 'macOS' runs-on: macos-latest + needs: check_source + if: needs.check_source.outputs.run_tests == 'true' steps: - uses: actions/checkout at v1 - name: Configure CPython @@ -60,6 +74,8 @@ jobs: build_ubuntu: name: 'Ubuntu' runs-on: ubuntu-latest + needs: check_source + if: needs.check_source.outputs.run_tests == 'true' env: OPENSSL_VER: 1.1.1f steps: From webhook-mailer at python.org Thu May 14 23:46:32 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 15 May 2020 03:46:32 -0000 Subject: [Python-checkins] [3.8] bpo-40548: GitHub Action workflow: skip jobs on doc only PRs (GH-20100) Message-ID: https://github.com/python/cpython/commit/6ad51a1fd6715d8266a43a4a89d496cf0615aace commit: 6ad51a1fd6715d8266a43a4a89d496cf0615aace branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-14T20:46:25-07:00 summary: [3.8] bpo-40548: GitHub Action workflow: skip jobs on doc only PRs (GH-20100) * bpo-40548: Always run GitHub action, even on doc PRs (GH-19981) Always run GitHub action jobs, even on documentation-only pull requests. So it will be possible to make a GitHub action job, like the Windows (64-bit) job, mandatory. 
(cherry picked from commit 4e363761fc02a89d53aba4382dc451293bd6f0ba) * bpo-40548: GitHub Action workflow: skip jobs on doc only PRs (GH-19983) Signed-off-by: Filipe La?ns (cherry picked from commit 75d7257b201a56f950c20cd9f5753a83fff4742b) * bpo-40548: github actions: pass the changes check on no source changes (GH-20097) Signed-off-by: Filipe La?ns (cherry picked from commit 6a78589b6b22878491a4b042bb8b3161e1d120f6) Co-authored-by: Filipe La?ns Co-authored-by: Filipe La?ns (cherry picked from commit 07bd5cf3d9551ae84100e6400836163fcd507f07) Co-authored-by: Victor Stinner files: M .github/workflows/build.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index d3d67475135a1..27d7f15aa5931 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,27 +1,37 @@ name: Tests +# bpo-40548: "paths-ignore" is not used to skip documentation-only PRs, because +# it prevents to mark a job as mandatory. A PR cannot be merged if a job is +# mandatory but not scheduled because of "paths-ignore". on: - #push: - # branches: - # - master - # - 3.8 - # - 3.7 - # paths-ignore: - # - 'Doc/**' - # - 'Misc/**' pull_request: branches: - master - 3.8 - 3.7 - paths-ignore: - - 'Doc/**' - - 'Misc/**' jobs: + check_source: + name: 'Check for source changes' + runs-on: ubuntu-latest + outputs: + run_tests: ${{ steps.check.outputs.run_tests }} + steps: + - uses: actions/checkout at v2 + - name: Check for source changes + id: check + run: | + if [ -z "GITHUB_BASE_REF" ]; then + echo '::set-output name=run_tests::true' + else + git fetch origin $GITHUB_BASE_REF --depth=1 + git diff --name-only origin/$GITHUB_BASE_REF... 
| grep -qvE '(\.rst$|^Doc|^Misc)' && echo '::set-output name=run_tests::true' || true + fi build_win32: name: 'Windows (x86)' runs-on: windows-latest + needs: check_source + if: needs.check_source.outputs.run_tests == 'true' steps: - uses: actions/checkout at v1 - name: Build CPython @@ -34,6 +44,8 @@ jobs: build_win_amd64: name: 'Windows (x64)' runs-on: windows-latest + needs: check_source + if: needs.check_source.outputs.run_tests == 'true' steps: - uses: actions/checkout at v1 - name: Build CPython @@ -46,6 +58,8 @@ jobs: build_macos: name: 'macOS' runs-on: macos-latest + needs: check_source + if: needs.check_source.outputs.run_tests == 'true' steps: - uses: actions/checkout at v1 - name: Configure CPython @@ -60,6 +74,8 @@ jobs: build_ubuntu: name: 'Ubuntu' runs-on: ubuntu-latest + needs: check_source + if: needs.check_source.outputs.run_tests == 'true' env: OPENSSL_VER: 1.1.1f steps: From webhook-mailer at python.org Fri May 15 06:44:13 2020 From: webhook-mailer at python.org (Andrew York) Date: Fri, 15 May 2020 10:44:13 -0000 Subject: [Python-checkins] Trivial typo fix in _tkinter.c (GH-19622) Message-ID: https://github.com/python/cpython/commit/003708bcf8f2c58d4b65f68318acf164d713e008 commit: 003708bcf8f2c58d4b65f68318acf164d713e008 branch: master author: Andrew York committer: GitHub date: 2020-05-15T03:43:58-07:00 summary: Trivial typo fix in _tkinter.c (GH-19622) Change spelling of a #define in _tkinter.c from HAVE_LIBTOMMAMTH to HAVE_LIBTOMMATH, since this is used to keep track of tclTomMath.h, not tclTomMamth.h. No other file seems to refer to this variable. files: M Modules/_tkinter.c diff --git a/Modules/_tkinter.c b/Modules/_tkinter.c index f530c5b0eb7b6..793c5e7154884 100644 --- a/Modules/_tkinter.c +++ b/Modules/_tkinter.c @@ -54,7 +54,7 @@ Copyright (C) 1994 Steen Lumholt. 
#if TK_HEX_VERSION >= 0x08050208 && TK_HEX_VERSION < 0x08060000 || \ TK_HEX_VERSION >= 0x08060200 -#define HAVE_LIBTOMMAMTH +#define HAVE_LIBTOMMATH #include #endif @@ -965,7 +965,7 @@ static PyType_Spec PyTclObject_Type_spec = { #define CHECK_STRING_LENGTH(s) #endif -#ifdef HAVE_LIBTOMMAMTH +#ifdef HAVE_LIBTOMMATH static Tcl_Obj* asBignumObj(PyObject *value) { @@ -1045,7 +1045,7 @@ AsObj(PyObject *value) #endif /* If there is an overflow in the wideInt conversion, fall through to bignum handling. */ -#ifdef HAVE_LIBTOMMAMTH +#ifdef HAVE_LIBTOMMATH return asBignumObj(value); #endif /* If there is no wideInt or bignum support, @@ -1167,7 +1167,7 @@ fromWideIntObj(TkappObject *tkapp, Tcl_Obj *value) return NULL; } -#ifdef HAVE_LIBTOMMAMTH +#ifdef HAVE_LIBTOMMATH static PyObject* fromBignumObj(TkappObject *tkapp, Tcl_Obj *value) { @@ -1247,7 +1247,7 @@ FromObj(TkappObject *tkapp, Tcl_Obj *value) fall through to bignum handling. */ } -#ifdef HAVE_LIBTOMMAMTH +#ifdef HAVE_LIBTOMMATH if (value->typePtr == tkapp->IntType || value->typePtr == tkapp->WideIntType || value->typePtr == tkapp->BignumType) { @@ -1300,7 +1300,7 @@ FromObj(TkappObject *tkapp, Tcl_Obj *value) } #endif -#ifdef HAVE_LIBTOMMAMTH +#ifdef HAVE_LIBTOMMATH if (tkapp->BignumType == NULL && strcmp(value->typePtr->name, "bignum") == 0) { /* bignum type is not registered in Tcl */ @@ -2001,7 +2001,7 @@ _tkinter_tkapp_getint(TkappObject *self, PyObject *arg) Prefer bignum because Tcl_GetWideIntFromObj returns ambiguous result for value in ranges -2**64..-2**63-1 and 2**63..2**64-1 (on 32-bit platform). 
*/ -#ifdef HAVE_LIBTOMMAMTH +#ifdef HAVE_LIBTOMMATH result = fromBignumObj(self, value); #else result = fromWideIntObj(self, value); From webhook-mailer at python.org Fri May 15 12:06:28 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Fri, 15 May 2020 16:06:28 -0000 Subject: [Python-checkins] bpo-40055: test_distutils leaves warnings filters unchanged (GH-20095) Message-ID: https://github.com/python/cpython/commit/6e57237faf0da8904e0130a11350cae3c5062b82 commit: 6e57237faf0da8904e0130a11350cae3c5062b82 branch: master author: Victor Stinner committer: GitHub date: 2020-05-15T18:06:23+02:00 summary: bpo-40055: test_distutils leaves warnings filters unchanged (GH-20095) distutils.tests now saves/restores warnings filters to leave them unchanged. Importing tests imports docutils which imports pkg_resources which adds a warnings filter. files: A Misc/NEWS.d/next/Tests/2020-05-15-01-21-44.bpo-40055.Xp4aP9.rst M Lib/distutils/tests/__init__.py diff --git a/Lib/distutils/tests/__init__.py b/Lib/distutils/tests/__init__.py index 1b939cbd5db2b..5d2e69e3e6a8f 100644 --- a/Lib/distutils/tests/__init__.py +++ b/Lib/distutils/tests/__init__.py @@ -15,6 +15,7 @@ import os import sys import unittest +import warnings from test.support import run_unittest @@ -22,6 +23,7 @@ def test_suite(): + old_filters = warnings.filters[:] suite = unittest.TestSuite() for fn in os.listdir(here): if fn.startswith("test") and fn.endswith(".py"): @@ -29,6 +31,10 @@ def test_suite(): __import__(modname) module = sys.modules[modname] suite.addTest(module.test_suite()) + # bpo-40055: Save/restore warnings filters to leave them unchanged. + # Importing tests imports docutils which imports pkg_resources which adds a + # warnings filter. 
+ warnings.filters[:] = old_filters return suite diff --git a/Misc/NEWS.d/next/Tests/2020-05-15-01-21-44.bpo-40055.Xp4aP9.rst b/Misc/NEWS.d/next/Tests/2020-05-15-01-21-44.bpo-40055.Xp4aP9.rst new file mode 100644 index 0000000000000..edb01182c3a5c --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2020-05-15-01-21-44.bpo-40055.Xp4aP9.rst @@ -0,0 +1,3 @@ +distutils.tests now saves/restores warnings filters to leave them unchanged. +Importing tests imports docutils which imports pkg_resources which adds a +warnings filter. From webhook-mailer at python.org Fri May 15 12:23:02 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 15 May 2020 16:23:02 -0000 Subject: [Python-checkins] bpo-40055: test_distutils leaves warnings filters unchanged (GH-20095) Message-ID: https://github.com/python/cpython/commit/7ef275160953f00b4303149df6d919c0afe763cb commit: 7ef275160953f00b4303149df6d919c0afe763cb branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-15T09:22:54-07:00 summary: bpo-40055: test_distutils leaves warnings filters unchanged (GH-20095) distutils.tests now saves/restores warnings filters to leave them unchanged. Importing tests imports docutils which imports pkg_resources which adds a warnings filter. 
(cherry picked from commit 6e57237faf0da8904e0130a11350cae3c5062b82) Co-authored-by: Victor Stinner files: A Misc/NEWS.d/next/Tests/2020-05-15-01-21-44.bpo-40055.Xp4aP9.rst M Lib/distutils/tests/__init__.py diff --git a/Lib/distutils/tests/__init__.py b/Lib/distutils/tests/__init__.py index 1b939cbd5db2b..5d2e69e3e6a8f 100644 --- a/Lib/distutils/tests/__init__.py +++ b/Lib/distutils/tests/__init__.py @@ -15,6 +15,7 @@ import os import sys import unittest +import warnings from test.support import run_unittest @@ -22,6 +23,7 @@ def test_suite(): + old_filters = warnings.filters[:] suite = unittest.TestSuite() for fn in os.listdir(here): if fn.startswith("test") and fn.endswith(".py"): @@ -29,6 +31,10 @@ def test_suite(): __import__(modname) module = sys.modules[modname] suite.addTest(module.test_suite()) + # bpo-40055: Save/restore warnings filters to leave them unchanged. + # Importing tests imports docutils which imports pkg_resources which adds a + # warnings filter. + warnings.filters[:] = old_filters return suite diff --git a/Misc/NEWS.d/next/Tests/2020-05-15-01-21-44.bpo-40055.Xp4aP9.rst b/Misc/NEWS.d/next/Tests/2020-05-15-01-21-44.bpo-40055.Xp4aP9.rst new file mode 100644 index 0000000000000..edb01182c3a5c --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2020-05-15-01-21-44.bpo-40055.Xp4aP9.rst @@ -0,0 +1,3 @@ +distutils.tests now saves/restores warnings filters to leave them unchanged. +Importing tests imports docutils which imports pkg_resources which adds a +warnings filter. 
From webhook-mailer at python.org Fri May 15 12:25:09 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 15 May 2020 16:25:09 -0000 Subject: [Python-checkins] bpo-40055: test_distutils leaves warnings filters unchanged (GH-20095) Message-ID: https://github.com/python/cpython/commit/4e6545b002dd4d068b2538ffca60830d0e7fd369 commit: 4e6545b002dd4d068b2538ffca60830d0e7fd369 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-15T09:25:00-07:00 summary: bpo-40055: test_distutils leaves warnings filters unchanged (GH-20095) distutils.tests now saves/restores warnings filters to leave them unchanged. Importing tests imports docutils which imports pkg_resources which adds a warnings filter. (cherry picked from commit 6e57237faf0da8904e0130a11350cae3c5062b82) Co-authored-by: Victor Stinner files: A Misc/NEWS.d/next/Tests/2020-05-15-01-21-44.bpo-40055.Xp4aP9.rst M Lib/distutils/tests/__init__.py diff --git a/Lib/distutils/tests/__init__.py b/Lib/distutils/tests/__init__.py index 1b939cbd5db2b..5d2e69e3e6a8f 100644 --- a/Lib/distutils/tests/__init__.py +++ b/Lib/distutils/tests/__init__.py @@ -15,6 +15,7 @@ import os import sys import unittest +import warnings from test.support import run_unittest @@ -22,6 +23,7 @@ def test_suite(): + old_filters = warnings.filters[:] suite = unittest.TestSuite() for fn in os.listdir(here): if fn.startswith("test") and fn.endswith(".py"): @@ -29,6 +31,10 @@ def test_suite(): __import__(modname) module = sys.modules[modname] suite.addTest(module.test_suite()) + # bpo-40055: Save/restore warnings filters to leave them unchanged. + # Importing tests imports docutils which imports pkg_resources which adds a + # warnings filter. 
+ warnings.filters[:] = old_filters return suite diff --git a/Misc/NEWS.d/next/Tests/2020-05-15-01-21-44.bpo-40055.Xp4aP9.rst b/Misc/NEWS.d/next/Tests/2020-05-15-01-21-44.bpo-40055.Xp4aP9.rst new file mode 100644 index 0000000000000..edb01182c3a5c --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2020-05-15-01-21-44.bpo-40055.Xp4aP9.rst @@ -0,0 +1,3 @@ +distutils.tests now saves/restores warnings filters to leave them unchanged. +Importing tests imports docutils which imports pkg_resources which adds a +warnings filter. From webhook-mailer at python.org Fri May 15 12:28:13 2020 From: webhook-mailer at python.org (Christian Heimes) Date: Fri, 15 May 2020 16:28:13 -0000 Subject: [Python-checkins] bpo-40479: Fix hashlib issue with OpenSSL 3.0.0 (GH-20107) Message-ID: https://github.com/python/cpython/commit/16d4e6f6f559b4fd21c9d29fea303489f658674f commit: 16d4e6f6f559b4fd21c9d29fea303489f658674f branch: master author: Christian Heimes committer: GitHub date: 2020-05-15T09:28:05-07:00 summary: bpo-40479: Fix hashlib issue with OpenSSL 3.0.0 (GH-20107) OpenSSL 3.0.0-alpha2 was released today. The FIPS_mode() function has been deprecated and removed. It no longer makes sense with the new provider and context system in OpenSSL 3.0.0. EVP_default_properties_is_fips_enabled() is good enough for our needs in unit tests. It's an internal API, too. Signed-off-by: Christian Heimes files: A Misc/NEWS.d/next/Library/2020-05-15-17-38-21.bpo-40479.yamSCh.rst M Modules/_hashopenssl.c M Modules/clinic/_hashopenssl.c.h diff --git a/Misc/NEWS.d/next/Library/2020-05-15-17-38-21.bpo-40479.yamSCh.rst b/Misc/NEWS.d/next/Library/2020-05-15-17-38-21.bpo-40479.yamSCh.rst new file mode 100644 index 0000000000000..87ede982f2967 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-15-17-38-21.bpo-40479.yamSCh.rst @@ -0,0 +1 @@ +The :mod:`hashlib` now compiles with OpenSSL 3.0.0-alpha2. 
diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c index 91834e5330f4b..b7661b40d0a7b 100644 --- a/Modules/_hashopenssl.c +++ b/Modules/_hashopenssl.c @@ -1109,19 +1109,25 @@ _hashlib.get_fips_mode -> int Determine the OpenSSL FIPS mode of operation. +For OpenSSL 3.0.0 and newer it returns the state of the default provider +in the default OSSL context. It's not quite the same as FIPS_mode() but good +enough for unittests. + Effectively any non-zero return value indicates FIPS mode; values other than 1 may have additional significance. - -See OpenSSL documentation for the FIPS_mode() function for details. [clinic start generated code]*/ static int _hashlib_get_fips_mode_impl(PyObject *module) -/*[clinic end generated code: output=87eece1bab4d3fa9 input=c2799c3132a36d6c]*/ +/*[clinic end generated code: output=87eece1bab4d3fa9 input=2db61538c41c6fef]*/ { + int result; +#if OPENSSL_VERSION_NUMBER >= 0x30000000L + result = EVP_default_properties_is_fips_enabled(NULL); +#else ERR_clear_error(); - int result = FIPS_mode(); + result = FIPS_mode(); if (result == 0) { // "If the library was built without support of the FIPS Object Module, // then the function will return 0 with an error code of @@ -1134,6 +1140,7 @@ _hashlib_get_fips_mode_impl(PyObject *module) } } return result; +#endif } #endif // !LIBRESSL_VERSION_NUMBER diff --git a/Modules/clinic/_hashopenssl.c.h b/Modules/clinic/_hashopenssl.c.h index 275784dcdcd0e..1b0c6d0ce43d2 100644 --- a/Modules/clinic/_hashopenssl.c.h +++ b/Modules/clinic/_hashopenssl.c.h @@ -733,10 +733,12 @@ PyDoc_STRVAR(_hashlib_get_fips_mode__doc__, "\n" "Determine the OpenSSL FIPS mode of operation.\n" "\n" -"Effectively any non-zero return value indicates FIPS mode;\n" -"values other than 1 may have additional significance.\n" +"For OpenSSL 3.0.0 and newer it returns the state of the default provider\n" +"in the default OSSL context. 
It\'s not quite the same as FIPS_mode() but good\n" +"enough for unittests.\n" "\n" -"See OpenSSL documentation for the FIPS_mode() function for details."); +"Effectively any non-zero return value indicates FIPS mode;\n" +"values other than 1 may have additional significance."); #define _HASHLIB_GET_FIPS_MODE_METHODDEF \ {"get_fips_mode", (PyCFunction)_hashlib_get_fips_mode, METH_NOARGS, _hashlib_get_fips_mode__doc__}, @@ -769,4 +771,4 @@ _hashlib_get_fips_mode(PyObject *module, PyObject *Py_UNUSED(ignored)) #ifndef _HASHLIB_GET_FIPS_MODE_METHODDEF #define _HASHLIB_GET_FIPS_MODE_METHODDEF #endif /* !defined(_HASHLIB_GET_FIPS_MODE_METHODDEF) */ -/*[clinic end generated code: output=b0703dd5a043394d input=a9049054013a1b77]*/ +/*[clinic end generated code: output=4babbd88389a196b input=a9049054013a1b77]*/ From webhook-mailer at python.org Fri May 15 12:48:34 2020 From: webhook-mailer at python.org (Christian Heimes) Date: Fri, 15 May 2020 16:48:34 -0000 Subject: [Python-checkins] bpo-40479: Test with latest OpenSSL versions (GH-20108) Message-ID: https://github.com/python/cpython/commit/62d618c06bd395308b7163dbcb26c7e6d0922033 commit: 62d618c06bd395308b7163dbcb26c7e6d0922033 branch: master author: Christian Heimes committer: GitHub date: 2020-05-15T09:48:25-07:00 summary: bpo-40479: Test with latest OpenSSL versions (GH-20108) * 1.0.2u (EOL) * 1.1.0l (EOL) * 1.1.1g * 3.0.0-alpha2 (disabled for now) Build the FIPS provider and create a FIPS configuration file for OpenSSL 3.0.0. 
Signed-off-by: Christian Heimes Automerge-Triggered-By: @tiran files: A Misc/NEWS.d/next/Tools-Demos/2020-05-15-17-48-25.bpo-40479.B1gBl-.rst M Tools/ssl/multissltests.py diff --git a/Misc/NEWS.d/next/Tools-Demos/2020-05-15-17-48-25.bpo-40479.B1gBl-.rst b/Misc/NEWS.d/next/Tools-Demos/2020-05-15-17-48-25.bpo-40479.B1gBl-.rst new file mode 100644 index 0000000000000..b59035971d7b0 --- /dev/null +++ b/Misc/NEWS.d/next/Tools-Demos/2020-05-15-17-48-25.bpo-40479.B1gBl-.rst @@ -0,0 +1,2 @@ +Update multissltest helper to test with latest OpenSSL 1.0.2, 1.1.0, 1.1.1, +and 3.0.0-alpha. diff --git a/Tools/ssl/multissltests.py b/Tools/ssl/multissltests.py index 05d6d7de296db..7aa28bd2157fb 100755 --- a/Tools/ssl/multissltests.py +++ b/Tools/ssl/multissltests.py @@ -41,13 +41,13 @@ log = logging.getLogger("multissl") OPENSSL_OLD_VERSIONS = [ - "1.0.2", ] OPENSSL_RECENT_VERSIONS = [ - "1.0.2t", + "1.0.2u", "1.1.0l", - "1.1.1f", + "1.1.1g", + # "3.0.0-alpha2" ] LIBRESSL_OLD_VERSIONS = [ @@ -143,6 +143,23 @@ help="Keep original sources for debugging." 
) +OPENSSL_FIPS_CNF = """\ +openssl_conf = openssl_init + +.include {self.install_dir}/ssl/fipsinstall.cnf +# .include {self.install_dir}/ssl/openssl.cnf + +[openssl_init] +providers = provider_sect + +[provider_sect] +fips = fips_sect +default = default_sect + +[default_sect] +activate = 1 +""" + class AbstractBuilder(object): library = None @@ -291,9 +308,13 @@ def _make_install(self): ["make", "-j1", self.install_target], cwd=self.build_dir ) + self._post_install() if not self.args.keep_sources: shutil.rmtree(self.build_dir) + def _post_install(self): + pass + def install(self): log.info(self.openssl_cli) if not self.has_openssl or self.args.force: @@ -365,6 +386,40 @@ class BuildOpenSSL(AbstractBuilder): # only install software, skip docs install_target = 'install_sw' + def _post_install(self): + if self.version.startswith("3.0"): + self._post_install_300() + + def _post_install_300(self): + # create ssl/ subdir with example configs + self._subprocess_call( + ["make", "-j1", "install_ssldirs"], + cwd=self.build_dir + ) + # Install FIPS module + # https://wiki.openssl.org/index.php/OpenSSL_3.0#Completing_the_installation_of_the_FIPS_Module + fipsinstall_cnf = os.path.join( + self.install_dir, "ssl", "fipsinstall.cnf" + ) + openssl_fips_cnf = os.path.join( + self.install_dir, "ssl", "openssl-fips.cnf" + ) + fips_mod = os.path.join(self.lib_dir, "ossl-modules/fips.so") + self._subprocess_call( + [ + self.openssl_cli, "fipsinstall", + "-out", fipsinstall_cnf, + "-module", fips_mod, + "-provider_name", "fips", + "-mac_name", "HMAC", + "-macopt", "digest:SHA256", + "-macopt", "hexkey:00", + "-section_name", "fips_sect" + ] + ) + with open(openssl_fips_cnf, "w") as f: + f.write(OPENSSL_FIPS_CNF.format(self=self)) + class BuildLibreSSL(AbstractBuilder): library = "LibreSSL" From webhook-mailer at python.org Fri May 15 13:06:02 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 15 May 2020 17:06:02 -0000 Subject: [Python-checkins] bpo-40479: Test 
with latest OpenSSL versions (GH-20108) Message-ID: https://github.com/python/cpython/commit/5e6b491403d7211588dcd399167f5bc21781c69c commit: 5e6b491403d7211588dcd399167f5bc21781c69c branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-15T10:05:57-07:00 summary: bpo-40479: Test with latest OpenSSL versions (GH-20108) * 1.0.2u (EOL) * 1.1.0l (EOL) * 1.1.1g * 3.0.0-alpha2 (disabled for now) Build the FIPS provider and create a FIPS configuration file for OpenSSL 3.0.0. Signed-off-by: Christian Heimes Automerge-Triggered-By: @tiran (cherry picked from commit 62d618c06bd395308b7163dbcb26c7e6d0922033) Co-authored-by: Christian Heimes files: A Misc/NEWS.d/next/Tools-Demos/2020-05-15-17-48-25.bpo-40479.B1gBl-.rst M Tools/ssl/multissltests.py diff --git a/Misc/NEWS.d/next/Tools-Demos/2020-05-15-17-48-25.bpo-40479.B1gBl-.rst b/Misc/NEWS.d/next/Tools-Demos/2020-05-15-17-48-25.bpo-40479.B1gBl-.rst new file mode 100644 index 0000000000000..b59035971d7b0 --- /dev/null +++ b/Misc/NEWS.d/next/Tools-Demos/2020-05-15-17-48-25.bpo-40479.B1gBl-.rst @@ -0,0 +1,2 @@ +Update multissltest helper to test with latest OpenSSL 1.0.2, 1.1.0, 1.1.1, +and 3.0.0-alpha. diff --git a/Tools/ssl/multissltests.py b/Tools/ssl/multissltests.py index 05d6d7de296db..7aa28bd2157fb 100755 --- a/Tools/ssl/multissltests.py +++ b/Tools/ssl/multissltests.py @@ -41,13 +41,13 @@ log = logging.getLogger("multissl") OPENSSL_OLD_VERSIONS = [ - "1.0.2", ] OPENSSL_RECENT_VERSIONS = [ - "1.0.2t", + "1.0.2u", "1.1.0l", - "1.1.1f", + "1.1.1g", + # "3.0.0-alpha2" ] LIBRESSL_OLD_VERSIONS = [ @@ -143,6 +143,23 @@ help="Keep original sources for debugging." 
) +OPENSSL_FIPS_CNF = """\ +openssl_conf = openssl_init + +.include {self.install_dir}/ssl/fipsinstall.cnf +# .include {self.install_dir}/ssl/openssl.cnf + +[openssl_init] +providers = provider_sect + +[provider_sect] +fips = fips_sect +default = default_sect + +[default_sect] +activate = 1 +""" + class AbstractBuilder(object): library = None @@ -291,9 +308,13 @@ def _make_install(self): ["make", "-j1", self.install_target], cwd=self.build_dir ) + self._post_install() if not self.args.keep_sources: shutil.rmtree(self.build_dir) + def _post_install(self): + pass + def install(self): log.info(self.openssl_cli) if not self.has_openssl or self.args.force: @@ -365,6 +386,40 @@ class BuildOpenSSL(AbstractBuilder): # only install software, skip docs install_target = 'install_sw' + def _post_install(self): + if self.version.startswith("3.0"): + self._post_install_300() + + def _post_install_300(self): + # create ssl/ subdir with example configs + self._subprocess_call( + ["make", "-j1", "install_ssldirs"], + cwd=self.build_dir + ) + # Install FIPS module + # https://wiki.openssl.org/index.php/OpenSSL_3.0#Completing_the_installation_of_the_FIPS_Module + fipsinstall_cnf = os.path.join( + self.install_dir, "ssl", "fipsinstall.cnf" + ) + openssl_fips_cnf = os.path.join( + self.install_dir, "ssl", "openssl-fips.cnf" + ) + fips_mod = os.path.join(self.lib_dir, "ossl-modules/fips.so") + self._subprocess_call( + [ + self.openssl_cli, "fipsinstall", + "-out", fipsinstall_cnf, + "-module", fips_mod, + "-provider_name", "fips", + "-mac_name", "HMAC", + "-macopt", "digest:SHA256", + "-macopt", "hexkey:00", + "-section_name", "fips_sect" + ] + ) + with open(openssl_fips_cnf, "w") as f: + f.write(OPENSSL_FIPS_CNF.format(self=self)) + class BuildLibreSSL(AbstractBuilder): library = "LibreSSL" From webhook-mailer at python.org Fri May 15 13:10:21 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 15 May 2020 17:10:21 -0000 Subject: [Python-checkins] bpo-40479: Test 
with latest OpenSSL versions (GH-20108) Message-ID: https://github.com/python/cpython/commit/5a06cf01ecb6a048fb47c086adc1336f54fe8789 commit: 5a06cf01ecb6a048fb47c086adc1336f54fe8789 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-15T10:10:15-07:00 summary: bpo-40479: Test with latest OpenSSL versions (GH-20108) * 1.0.2u (EOL) * 1.1.0l (EOL) * 1.1.1g * 3.0.0-alpha2 (disabled for now) Build the FIPS provider and create a FIPS configuration file for OpenSSL 3.0.0. Signed-off-by: Christian Heimes Automerge-Triggered-By: @tiran (cherry picked from commit 62d618c06bd395308b7163dbcb26c7e6d0922033) Co-authored-by: Christian Heimes files: A Misc/NEWS.d/next/Tools-Demos/2020-05-15-17-48-25.bpo-40479.B1gBl-.rst M Tools/ssl/multissltests.py diff --git a/Misc/NEWS.d/next/Tools-Demos/2020-05-15-17-48-25.bpo-40479.B1gBl-.rst b/Misc/NEWS.d/next/Tools-Demos/2020-05-15-17-48-25.bpo-40479.B1gBl-.rst new file mode 100644 index 0000000000000..b59035971d7b0 --- /dev/null +++ b/Misc/NEWS.d/next/Tools-Demos/2020-05-15-17-48-25.bpo-40479.B1gBl-.rst @@ -0,0 +1,2 @@ +Update multissltest helper to test with latest OpenSSL 1.0.2, 1.1.0, 1.1.1, +and 3.0.0-alpha. diff --git a/Tools/ssl/multissltests.py b/Tools/ssl/multissltests.py index 05d6d7de296db..7aa28bd2157fb 100755 --- a/Tools/ssl/multissltests.py +++ b/Tools/ssl/multissltests.py @@ -41,13 +41,13 @@ log = logging.getLogger("multissl") OPENSSL_OLD_VERSIONS = [ - "1.0.2", ] OPENSSL_RECENT_VERSIONS = [ - "1.0.2t", + "1.0.2u", "1.1.0l", - "1.1.1f", + "1.1.1g", + # "3.0.0-alpha2" ] LIBRESSL_OLD_VERSIONS = [ @@ -143,6 +143,23 @@ help="Keep original sources for debugging." 
) +OPENSSL_FIPS_CNF = """\ +openssl_conf = openssl_init + +.include {self.install_dir}/ssl/fipsinstall.cnf +# .include {self.install_dir}/ssl/openssl.cnf + +[openssl_init] +providers = provider_sect + +[provider_sect] +fips = fips_sect +default = default_sect + +[default_sect] +activate = 1 +""" + class AbstractBuilder(object): library = None @@ -291,9 +308,13 @@ def _make_install(self): ["make", "-j1", self.install_target], cwd=self.build_dir ) + self._post_install() if not self.args.keep_sources: shutil.rmtree(self.build_dir) + def _post_install(self): + pass + def install(self): log.info(self.openssl_cli) if not self.has_openssl or self.args.force: @@ -365,6 +386,40 @@ class BuildOpenSSL(AbstractBuilder): # only install software, skip docs install_target = 'install_sw' + def _post_install(self): + if self.version.startswith("3.0"): + self._post_install_300() + + def _post_install_300(self): + # create ssl/ subdir with example configs + self._subprocess_call( + ["make", "-j1", "install_ssldirs"], + cwd=self.build_dir + ) + # Install FIPS module + # https://wiki.openssl.org/index.php/OpenSSL_3.0#Completing_the_installation_of_the_FIPS_Module + fipsinstall_cnf = os.path.join( + self.install_dir, "ssl", "fipsinstall.cnf" + ) + openssl_fips_cnf = os.path.join( + self.install_dir, "ssl", "openssl-fips.cnf" + ) + fips_mod = os.path.join(self.lib_dir, "ossl-modules/fips.so") + self._subprocess_call( + [ + self.openssl_cli, "fipsinstall", + "-out", fipsinstall_cnf, + "-module", fips_mod, + "-provider_name", "fips", + "-mac_name", "HMAC", + "-macopt", "digest:SHA256", + "-macopt", "hexkey:00", + "-section_name", "fips_sect" + ] + ) + with open(openssl_fips_cnf, "w") as f: + f.write(OPENSSL_FIPS_CNF.format(self=self)) + class BuildLibreSSL(AbstractBuilder): library = "LibreSSL" From webhook-mailer at python.org Fri May 15 14:55:33 2020 From: webhook-mailer at python.org (Christian Heimes) Date: Fri, 15 May 2020 18:55:33 -0000 Subject: [Python-checkins] bpo-40515: Require 
OPENSSL_THREADS (GH-19953) Message-ID: https://github.com/python/cpython/commit/c087a268a4d4ead8ef2ca21e325423818729da89 commit: c087a268a4d4ead8ef2ca21e325423818729da89 branch: master author: Christian Heimes committer: GitHub date: 2020-05-15T11:55:25-07:00 summary: bpo-40515: Require OPENSSL_THREADS (GH-19953) The ``ssl`` and ``hashlib`` modules now actively check that OpenSSL is build with thread support. Python 3.7.0 made thread support mandatory and no longer works safely with a no-thread builds. Signed-off-by: Christian Heimes files: A Misc/NEWS.d/next/Library/2020-05-06-13-51-19.bpo-40515.TUCvYB.rst M Modules/_hashopenssl.c M Modules/_ssl.c diff --git a/Misc/NEWS.d/next/Library/2020-05-06-13-51-19.bpo-40515.TUCvYB.rst b/Misc/NEWS.d/next/Library/2020-05-06-13-51-19.bpo-40515.TUCvYB.rst new file mode 100644 index 0000000000000..af77a57fe7237 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-06-13-51-19.bpo-40515.TUCvYB.rst @@ -0,0 +1,3 @@ +The :mod:`ssl` and :mod:`hashlib` modules now actively check that OpenSSL is +build with thread support. Python 3.7.0 made thread support mandatory and no +longer works safely with a no-thread builds. 
diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c index b7661b40d0a7b..b55ac93f2b076 100644 --- a/Modules/_hashopenssl.c +++ b/Modules/_hashopenssl.c @@ -27,6 +27,10 @@ #include // FIPS_mode() +#ifndef OPENSSL_THREADS +# error "OPENSSL_THREADS is not defined, Python requires thread-safe OpenSSL" +#endif + #if (OPENSSL_VERSION_NUMBER < 0x10100000L) || defined(LIBRESSL_VERSION_NUMBER) /* OpenSSL < 1.1.0 */ #define EVP_MD_CTX_new EVP_MD_CTX_create diff --git a/Modules/_ssl.c b/Modules/_ssl.c index d633a06053ae3..987a99178775d 100644 --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -73,6 +73,10 @@ static PySocketModule_APIObject PySocketModule; # endif #endif +#ifndef OPENSSL_THREADS +# error "OPENSSL_THREADS is not defined, Python requires thread-safe OpenSSL" +#endif + /* SSL error object */ static PyObject *PySSLErrorObject; static PyObject *PySSLCertVerificationErrorObject; @@ -6005,7 +6009,7 @@ PyInit__ssl(void) if (!_setup_ssl_threads()) { return NULL; } -#elif OPENSSL_VERSION_1_1 && defined(OPENSSL_THREADS) +#elif OPENSSL_VERSION_1_1 /* OpenSSL 1.1.0 builtin thread support is enabled */ _ssl_locks_count++; #endif From webhook-mailer at python.org Fri May 15 16:12:12 2020 From: webhook-mailer at python.org (romasku) Date: Fri, 15 May 2020 20:12:12 -0000 Subject: [Python-checkins] bpo-40607: Reraise exception during task cancelation in asyncio.wait_for() (GH-20054) Message-ID: https://github.com/python/cpython/commit/382a5635bd10c237c3e23e346b21cde27e48d7fa commit: 382a5635bd10c237c3e23e346b21cde27e48d7fa branch: master author: romasku committer: GitHub date: 2020-05-15T13:12:05-07:00 summary: bpo-40607: Reraise exception during task cancelation in asyncio.wait_for() (GH-20054) Currently, if asyncio.wait_for() timeout expires, it cancels inner future and then always raises TimeoutError. In case those future is task, it can handle cancelation mannually, and those process can lead to some other exception. 
Current implementation silently loses thoses exception. To resolve this, wait_for will check was the cancelation successfull or not. In case there was exception, wait_for will reraise it. Co-authored-by: Roman Skurikhin files: A Misc/NEWS.d/next/Library/2020-05-13-15-32-13.bpo-40607.uSPFCi.rst M Doc/library/asyncio-task.rst M Lib/asyncio/tasks.py M Lib/test/test_asyncio/test_tasks.py M Misc/ACKS diff --git a/Doc/library/asyncio-task.rst b/Doc/library/asyncio-task.rst index 42e2b4e2fc5b9..bc8a2722bcca7 100644 --- a/Doc/library/asyncio-task.rst +++ b/Doc/library/asyncio-task.rst @@ -453,7 +453,8 @@ Timeouts wrap it in :func:`shield`. The function will wait until the future is actually cancelled, - so the total wait time may exceed the *timeout*. + so the total wait time may exceed the *timeout*. If an exception + happens during cancellation, it is propagated. If the wait is cancelled, the future *aw* is also cancelled. diff --git a/Lib/asyncio/tasks.py b/Lib/asyncio/tasks.py index 717837d856843..f5de1a2eea99f 100644 --- a/Lib/asyncio/tasks.py +++ b/Lib/asyncio/tasks.py @@ -496,7 +496,15 @@ async def wait_for(fut, timeout, *, loop=None): # after wait_for() returns. 
# See https://bugs.python.org/issue32751 await _cancel_and_wait(fut, loop=loop) - raise exceptions.TimeoutError() + # In case task cancellation failed with some + # exception, we should re-raise it + # See https://bugs.python.org/issue40607 + try: + fut.result() + except exceptions.CancelledError as exc: + raise exceptions.TimeoutError() from exc + else: + raise exceptions.TimeoutError() finally: timeout_handle.cancel() diff --git a/Lib/test/test_asyncio/test_tasks.py b/Lib/test/test_asyncio/test_tasks.py index 6eb6b46ec8af7..0f8d921c5bc7f 100644 --- a/Lib/test/test_asyncio/test_tasks.py +++ b/Lib/test/test_asyncio/test_tasks.py @@ -80,6 +80,12 @@ def __await__(self): return self +# The following value can be used as a very small timeout: +# it passes check "timeout > 0", but has almost +# no effect on the test performance +_EPSILON = 0.0001 + + class BaseTaskTests: Task = None @@ -904,12 +910,53 @@ async def inner(): inner_task = self.new_task(loop, inner()) - with self.assertRaises(asyncio.TimeoutError): - await asyncio.wait_for(inner_task, timeout=0.1) + await asyncio.wait_for(inner_task, timeout=_EPSILON) - self.assertTrue(task_done) + with self.assertRaises(asyncio.TimeoutError) as cm: + loop.run_until_complete(foo()) - loop.run_until_complete(foo()) + self.assertTrue(task_done) + chained = cm.exception.__context__ + self.assertEqual(type(chained), asyncio.CancelledError) + + def test_wait_for_reraises_exception_during_cancellation(self): + loop = asyncio.new_event_loop() + self.addCleanup(loop.close) + + class FooException(Exception): + pass + + async def foo(): + async def inner(): + try: + await asyncio.sleep(0.2) + finally: + raise FooException + + inner_task = self.new_task(loop, inner()) + + await asyncio.wait_for(inner_task, timeout=_EPSILON) + + with self.assertRaises(FooException): + loop.run_until_complete(foo()) + + def test_wait_for_raises_timeout_error_if_returned_during_cancellation(self): + loop = asyncio.new_event_loop() + 
self.addCleanup(loop.close) + + async def foo(): + async def inner(): + try: + await asyncio.sleep(0.2) + except asyncio.CancelledError: + return 42 + + inner_task = self.new_task(loop, inner()) + + await asyncio.wait_for(inner_task, timeout=_EPSILON) + + with self.assertRaises(asyncio.TimeoutError): + loop.run_until_complete(foo()) def test_wait_for_self_cancellation(self): loop = asyncio.new_event_loop() diff --git a/Misc/ACKS b/Misc/ACKS index b479aa5d807f5..fad920b0510ad 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -1593,6 +1593,7 @@ J. Sipprell Ngalim Siregar Kragen Sitaker Kaartic Sivaraam +Roman Skurikhin Ville Skytt? Michael Sloan Nick Sloan diff --git a/Misc/NEWS.d/next/Library/2020-05-13-15-32-13.bpo-40607.uSPFCi.rst b/Misc/NEWS.d/next/Library/2020-05-13-15-32-13.bpo-40607.uSPFCi.rst new file mode 100644 index 0000000000000..8060628b59548 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-13-15-32-13.bpo-40607.uSPFCi.rst @@ -0,0 +1,3 @@ +When cancelling a task due to timeout, :meth:`asyncio.wait_for` will now +propagate the exception if an error happens during cancellation. +Patch by Roman Skurikhin. 
From webhook-mailer at python.org Fri May 15 16:32:33 2020 From: webhook-mailer at python.org (Christian Heimes) Date: Fri, 15 May 2020 20:32:33 -0000 Subject: [Python-checkins] bpo-40163: Fix multissltest download of old OpenSSL (GH-19329) Message-ID: https://github.com/python/cpython/commit/938717fd04c53c717895a756d5910e8c8813706c commit: 938717fd04c53c717895a756d5910e8c8813706c branch: master author: Christian Heimes committer: GitHub date: 2020-05-15T22:32:25+02:00 summary: bpo-40163: Fix multissltest download of old OpenSSL (GH-19329) files: A Misc/NEWS.d/next/Tools-Demos/2020-04-03-08-32-31.bpo-40163.lX8K4B.rst M Tools/ssl/multissltests.py diff --git a/Misc/NEWS.d/next/Tools-Demos/2020-04-03-08-32-31.bpo-40163.lX8K4B.rst b/Misc/NEWS.d/next/Tools-Demos/2020-04-03-08-32-31.bpo-40163.lX8K4B.rst new file mode 100644 index 0000000000000..fc0a22a0a953e --- /dev/null +++ b/Misc/NEWS.d/next/Tools-Demos/2020-04-03-08-32-31.bpo-40163.lX8K4B.rst @@ -0,0 +1,3 @@ +Fix multissltest tool. OpenSSL has changed download URL for old releases. +The multissltest tool now tries to download from current and old download +URLs. 
diff --git a/Tools/ssl/multissltests.py b/Tools/ssl/multissltests.py index 7aa28bd2157fb..0e37ec1bba93b 100755 --- a/Tools/ssl/multissltests.py +++ b/Tools/ssl/multissltests.py @@ -30,10 +30,12 @@ import os try: from urllib.request import urlopen + from urllib.error import HTTPError except ImportError: - from urllib2 import urlopen -import subprocess + from urllib2 import urlopen, HTTPError import shutil +import string +import subprocess import sys import tarfile @@ -163,7 +165,7 @@ class AbstractBuilder(object): library = None - url_template = None + url_templates = None src_template = None build_template = None install_target = 'install' @@ -202,6 +204,11 @@ def __eq__(self, other): def __hash__(self): return hash((self.library, self.version)) + @property + def short_version(self): + """Short version for OpenSSL download URL""" + return None + @property def openssl_cli(self): """openssl CLI binary""" @@ -255,11 +262,23 @@ def _download_src(self): src_dir = os.path.dirname(self.src_file) if not os.path.isdir(src_dir): os.makedirs(src_dir) - url = self.url_template.format(self.version) - log.info("Downloading from {}".format(url)) - req = urlopen(url) - # KISS, read all, write all - data = req.read() + data = None + for url_template in self.url_templates: + url = url_template.format(v=self.version, s=self.short_version) + log.info("Downloading from {}".format(url)) + try: + req = urlopen(url) + # KISS, read all, write all + data = req.read() + except HTTPError as e: + log.error( + "Download from {} has from failed: {}".format(url, e) + ) + else: + log.info("Successfully downloaded from {}".format(url)) + break + if data is None: + raise ValueError("All download URLs have failed") log.info("Storing {}".format(self.src_file)) with open(self.src_file, "wb") as f: f.write(data) @@ -380,7 +399,10 @@ def run_python_tests(self, tests, network=True): class BuildOpenSSL(AbstractBuilder): library = "OpenSSL" - url_template = "https://www.openssl.org/source/openssl-{}.tar.gz" 
+ url_templates = ( + "https://www.openssl.org/source/openssl-{v}.tar.gz", + "https://www.openssl.org/source/old/{s}/openssl-{v}.tar.gz" + ) src_template = "openssl-{}.tar.gz" build_template = "openssl-{}" # only install software, skip docs @@ -419,12 +441,20 @@ def _post_install_300(self): ) with open(openssl_fips_cnf, "w") as f: f.write(OPENSSL_FIPS_CNF.format(self=self)) + @property + def short_version(self): + """Short version for OpenSSL download URL""" + short_version = self.version.rstrip(string.ascii_letters) + if short_version.startswith("0.9"): + short_version = "0.9.x" + return short_version class BuildLibreSSL(AbstractBuilder): library = "LibreSSL" - url_template = ( - "https://ftp.openbsd.org/pub/OpenBSD/LibreSSL/libressl-{}.tar.gz") + url_templates = ( + "https://ftp.openbsd.org/pub/OpenBSD/LibreSSL/libressl-{v}.tar.gz", + ) src_template = "libressl-{}.tar.gz" build_template = "libressl-{}" From webhook-mailer at python.org Fri May 15 16:36:58 2020 From: webhook-mailer at python.org (Christian Heimes) Date: Fri, 15 May 2020 20:36:58 -0000 Subject: [Python-checkins] [3.8] bpo-40515: Require OPENSSL_THREADS (GH-19953) (GH-20119) Message-ID: https://github.com/python/cpython/commit/387c7441f589cc45ea86f1fa257af616c39d9a4b commit: 387c7441f589cc45ea86f1fa257af616c39d9a4b branch: 3.8 author: Christian Heimes committer: GitHub date: 2020-05-15T22:36:51+02:00 summary: [3.8] bpo-40515: Require OPENSSL_THREADS (GH-19953) (GH-20119) files: A Misc/NEWS.d/next/Library/2020-05-06-13-51-19.bpo-40515.TUCvYB.rst M Modules/_hashopenssl.c M Modules/_ssl.c diff --git a/Misc/NEWS.d/next/Library/2020-05-06-13-51-19.bpo-40515.TUCvYB.rst b/Misc/NEWS.d/next/Library/2020-05-06-13-51-19.bpo-40515.TUCvYB.rst new file mode 100644 index 0000000000000..af77a57fe7237 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-06-13-51-19.bpo-40515.TUCvYB.rst @@ -0,0 +1,3 @@ +The :mod:`ssl` and :mod:`hashlib` modules now actively check that OpenSSL is +build with thread support. 
Python 3.7.0 made thread support mandatory and no +longer works safely with a no-thread builds. diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c index 3e5f9c3e0df6a..edadbcb3933c1 100644 --- a/Modules/_hashopenssl.c +++ b/Modules/_hashopenssl.c @@ -26,6 +26,10 @@ #include #include "openssl/err.h" +#ifndef OPENSSL_THREADS +# error "OPENSSL_THREADS is not defined, Python requires thread-safe OpenSSL" +#endif + #if (OPENSSL_VERSION_NUMBER < 0x10100000L) || defined(LIBRESSL_VERSION_NUMBER) /* OpenSSL < 1.1.0 */ #define EVP_MD_CTX_new EVP_MD_CTX_create diff --git a/Modules/_ssl.c b/Modules/_ssl.c index 43b236c212120..1da65eae7a8b4 100644 --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -75,6 +75,10 @@ static PySocketModule_APIObject PySocketModule; # endif #endif +#ifndef OPENSSL_THREADS +# error "OPENSSL_THREADS is not defined, Python requires thread-safe OpenSSL" +#endif + /* SSL error object */ static PyObject *PySSLErrorObject; static PyObject *PySSLCertVerificationErrorObject; @@ -6008,7 +6012,7 @@ PyInit__ssl(void) if (!_setup_ssl_threads()) { return NULL; } -#elif OPENSSL_VERSION_1_1 && defined(OPENSSL_THREADS) +#elif OPENSSL_VERSION_1_1 /* OpenSSL 1.1.0 builtin thread support is enabled */ _ssl_locks_count++; #endif From webhook-mailer at python.org Fri May 15 16:37:37 2020 From: webhook-mailer at python.org (Christian Heimes) Date: Fri, 15 May 2020 20:37:37 -0000 Subject: [Python-checkins] [3.7] bpo-40515: Require OPENSSL_THREADS (GH-19953) (GH-20120) Message-ID: https://github.com/python/cpython/commit/efc9065904c4df8962e04303c2c03642f45019b5 commit: efc9065904c4df8962e04303c2c03642f45019b5 branch: 3.7 author: Christian Heimes committer: GitHub date: 2020-05-15T22:37:32+02:00 summary: [3.7] bpo-40515: Require OPENSSL_THREADS (GH-19953) (GH-20120) files: A Misc/NEWS.d/next/Library/2020-05-06-13-51-19.bpo-40515.TUCvYB.rst M Modules/_hashopenssl.c M Modules/_ssl.c diff --git a/Misc/NEWS.d/next/Library/2020-05-06-13-51-19.bpo-40515.TUCvYB.rst 
b/Misc/NEWS.d/next/Library/2020-05-06-13-51-19.bpo-40515.TUCvYB.rst new file mode 100644 index 0000000000000..af77a57fe7237 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-06-13-51-19.bpo-40515.TUCvYB.rst @@ -0,0 +1,3 @@ +The :mod:`ssl` and :mod:`hashlib` modules now actively check that OpenSSL is +build with thread support. Python 3.7.0 made thread support mandatory and no +longer works safely with a no-thread builds. diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c index b13ade6049614..d66709ae058e9 100644 --- a/Modules/_hashopenssl.c +++ b/Modules/_hashopenssl.c @@ -32,6 +32,10 @@ module _hashlib [clinic start generated code]*/ /*[clinic end generated code: output=da39a3ee5e6b4b0d input=c2b4ff081bac4be1]*/ +#ifndef OPENSSL_THREADS +# error "OPENSSL_THREADS is not defined, Python requires thread-safe OpenSSL" +#endif + #define MUNCH_SIZE INT_MAX #ifndef HASH_OBJ_CONSTRUCTOR diff --git a/Modules/_ssl.c b/Modules/_ssl.c index 4611710a95def..94606ef0e2993 100644 --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -75,6 +75,10 @@ static PySocketModule_APIObject PySocketModule; # endif #endif +#ifndef OPENSSL_THREADS +# error "OPENSSL_THREADS is not defined, Python requires thread-safe OpenSSL" +#endif + /* SSL error object */ static PyObject *PySSLErrorObject; static PyObject *PySSLCertVerificationErrorObject; @@ -5875,7 +5879,7 @@ PyInit__ssl(void) if (!_setup_ssl_threads()) { return NULL; } -#elif OPENSSL_VERSION_1_1 && defined(OPENSSL_THREADS) +#elif OPENSSL_VERSION_1_1 /* OpenSSL 1.1.0 builtin thread support is enabled */ _ssl_locks_count++; #endif From webhook-mailer at python.org Fri May 15 16:55:02 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 15 May 2020 20:55:02 -0000 Subject: [Python-checkins] bpo-40163: Fix multissltest download of old OpenSSL (GH-19329) Message-ID: https://github.com/python/cpython/commit/7a89f9b4e2c05a6abdf59e8a96a1fc80a47a1144 commit: 7a89f9b4e2c05a6abdf59e8a96a1fc80a47a1144 branch: 3.7 author: Miss 
Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-15T13:54:54-07:00 summary: bpo-40163: Fix multissltest download of old OpenSSL (GH-19329) (cherry picked from commit 938717fd04c53c717895a756d5910e8c8813706c) Co-authored-by: Christian Heimes files: A Misc/NEWS.d/next/Tools-Demos/2020-04-03-08-32-31.bpo-40163.lX8K4B.rst M Tools/ssl/multissltests.py diff --git a/Misc/NEWS.d/next/Tools-Demos/2020-04-03-08-32-31.bpo-40163.lX8K4B.rst b/Misc/NEWS.d/next/Tools-Demos/2020-04-03-08-32-31.bpo-40163.lX8K4B.rst new file mode 100644 index 0000000000000..fc0a22a0a953e --- /dev/null +++ b/Misc/NEWS.d/next/Tools-Demos/2020-04-03-08-32-31.bpo-40163.lX8K4B.rst @@ -0,0 +1,3 @@ +Fix multissltest tool. OpenSSL has changed download URL for old releases. +The multissltest tool now tries to download from current and old download +URLs. diff --git a/Tools/ssl/multissltests.py b/Tools/ssl/multissltests.py index 7aa28bd2157fb..0e37ec1bba93b 100755 --- a/Tools/ssl/multissltests.py +++ b/Tools/ssl/multissltests.py @@ -30,10 +30,12 @@ import os try: from urllib.request import urlopen + from urllib.error import HTTPError except ImportError: - from urllib2 import urlopen -import subprocess + from urllib2 import urlopen, HTTPError import shutil +import string +import subprocess import sys import tarfile @@ -163,7 +165,7 @@ class AbstractBuilder(object): library = None - url_template = None + url_templates = None src_template = None build_template = None install_target = 'install' @@ -202,6 +204,11 @@ def __eq__(self, other): def __hash__(self): return hash((self.library, self.version)) + @property + def short_version(self): + """Short version for OpenSSL download URL""" + return None + @property def openssl_cli(self): """openssl CLI binary""" @@ -255,11 +262,23 @@ def _download_src(self): src_dir = os.path.dirname(self.src_file) if not os.path.isdir(src_dir): os.makedirs(src_dir) - url = self.url_template.format(self.version) - 
log.info("Downloading from {}".format(url)) - req = urlopen(url) - # KISS, read all, write all - data = req.read() + data = None + for url_template in self.url_templates: + url = url_template.format(v=self.version, s=self.short_version) + log.info("Downloading from {}".format(url)) + try: + req = urlopen(url) + # KISS, read all, write all + data = req.read() + except HTTPError as e: + log.error( + "Download from {} has from failed: {}".format(url, e) + ) + else: + log.info("Successfully downloaded from {}".format(url)) + break + if data is None: + raise ValueError("All download URLs have failed") log.info("Storing {}".format(self.src_file)) with open(self.src_file, "wb") as f: f.write(data) @@ -380,7 +399,10 @@ def run_python_tests(self, tests, network=True): class BuildOpenSSL(AbstractBuilder): library = "OpenSSL" - url_template = "https://www.openssl.org/source/openssl-{}.tar.gz" + url_templates = ( + "https://www.openssl.org/source/openssl-{v}.tar.gz", + "https://www.openssl.org/source/old/{s}/openssl-{v}.tar.gz" + ) src_template = "openssl-{}.tar.gz" build_template = "openssl-{}" # only install software, skip docs @@ -419,12 +441,20 @@ def _post_install_300(self): ) with open(openssl_fips_cnf, "w") as f: f.write(OPENSSL_FIPS_CNF.format(self=self)) + @property + def short_version(self): + """Short version for OpenSSL download URL""" + short_version = self.version.rstrip(string.ascii_letters) + if short_version.startswith("0.9"): + short_version = "0.9.x" + return short_version class BuildLibreSSL(AbstractBuilder): library = "LibreSSL" - url_template = ( - "https://ftp.openbsd.org/pub/OpenBSD/LibreSSL/libressl-{}.tar.gz") + url_templates = ( + "https://ftp.openbsd.org/pub/OpenBSD/LibreSSL/libressl-{v}.tar.gz", + ) src_template = "libressl-{}.tar.gz" build_template = "libressl-{}" From webhook-mailer at python.org Fri May 15 16:55:49 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 15 May 2020 20:55:49 -0000 Subject: [Python-checkins] 
bpo-40163: Fix multissltest download of old OpenSSL (GH-19329) Message-ID: https://github.com/python/cpython/commit/fcea08059f46d2d9582bb7ce5b2e905b20b86e8e commit: fcea08059f46d2d9582bb7ce5b2e905b20b86e8e branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-15T13:55:45-07:00 summary: bpo-40163: Fix multissltest download of old OpenSSL (GH-19329) (cherry picked from commit 938717fd04c53c717895a756d5910e8c8813706c) Co-authored-by: Christian Heimes files: A Misc/NEWS.d/next/Tools-Demos/2020-04-03-08-32-31.bpo-40163.lX8K4B.rst M Tools/ssl/multissltests.py diff --git a/Misc/NEWS.d/next/Tools-Demos/2020-04-03-08-32-31.bpo-40163.lX8K4B.rst b/Misc/NEWS.d/next/Tools-Demos/2020-04-03-08-32-31.bpo-40163.lX8K4B.rst new file mode 100644 index 0000000000000..fc0a22a0a953e --- /dev/null +++ b/Misc/NEWS.d/next/Tools-Demos/2020-04-03-08-32-31.bpo-40163.lX8K4B.rst @@ -0,0 +1,3 @@ +Fix multissltest tool. OpenSSL has changed download URL for old releases. +The multissltest tool now tries to download from current and old download +URLs. 
diff --git a/Tools/ssl/multissltests.py b/Tools/ssl/multissltests.py index 7aa28bd2157fb..0e37ec1bba93b 100755 --- a/Tools/ssl/multissltests.py +++ b/Tools/ssl/multissltests.py @@ -30,10 +30,12 @@ import os try: from urllib.request import urlopen + from urllib.error import HTTPError except ImportError: - from urllib2 import urlopen -import subprocess + from urllib2 import urlopen, HTTPError import shutil +import string +import subprocess import sys import tarfile @@ -163,7 +165,7 @@ class AbstractBuilder(object): library = None - url_template = None + url_templates = None src_template = None build_template = None install_target = 'install' @@ -202,6 +204,11 @@ def __eq__(self, other): def __hash__(self): return hash((self.library, self.version)) + @property + def short_version(self): + """Short version for OpenSSL download URL""" + return None + @property def openssl_cli(self): """openssl CLI binary""" @@ -255,11 +262,23 @@ def _download_src(self): src_dir = os.path.dirname(self.src_file) if not os.path.isdir(src_dir): os.makedirs(src_dir) - url = self.url_template.format(self.version) - log.info("Downloading from {}".format(url)) - req = urlopen(url) - # KISS, read all, write all - data = req.read() + data = None + for url_template in self.url_templates: + url = url_template.format(v=self.version, s=self.short_version) + log.info("Downloading from {}".format(url)) + try: + req = urlopen(url) + # KISS, read all, write all + data = req.read() + except HTTPError as e: + log.error( + "Download from {} has from failed: {}".format(url, e) + ) + else: + log.info("Successfully downloaded from {}".format(url)) + break + if data is None: + raise ValueError("All download URLs have failed") log.info("Storing {}".format(self.src_file)) with open(self.src_file, "wb") as f: f.write(data) @@ -380,7 +399,10 @@ def run_python_tests(self, tests, network=True): class BuildOpenSSL(AbstractBuilder): library = "OpenSSL" - url_template = "https://www.openssl.org/source/openssl-{}.tar.gz" 
+ url_templates = ( + "https://www.openssl.org/source/openssl-{v}.tar.gz", + "https://www.openssl.org/source/old/{s}/openssl-{v}.tar.gz" + ) src_template = "openssl-{}.tar.gz" build_template = "openssl-{}" # only install software, skip docs @@ -419,12 +441,20 @@ def _post_install_300(self): ) with open(openssl_fips_cnf, "w") as f: f.write(OPENSSL_FIPS_CNF.format(self=self)) + @property + def short_version(self): + """Short version for OpenSSL download URL""" + short_version = self.version.rstrip(string.ascii_letters) + if short_version.startswith("0.9"): + short_version = "0.9.x" + return short_version class BuildLibreSSL(AbstractBuilder): library = "LibreSSL" - url_template = ( - "https://ftp.openbsd.org/pub/OpenBSD/LibreSSL/libressl-{}.tar.gz") + url_templates = ( + "https://ftp.openbsd.org/pub/OpenBSD/LibreSSL/libressl-{v}.tar.gz", + ) src_template = "libressl-{}.tar.gz" build_template = "libressl-{}" From webhook-mailer at python.org Fri May 15 17:26:08 2020 From: webhook-mailer at python.org (Gregory P. Smith) Date: Fri, 15 May 2020 21:26:08 -0000 Subject: [Python-checkins] bpo-40636: Clarify the zip built-in docstring. (GH-20118) Message-ID: https://github.com/python/cpython/commit/6a5d3ff67644af42b1a781be2eacb2e82913441c commit: 6a5d3ff67644af42b1a781be2eacb2e82913441c branch: master author: Gregory P. Smith committer: GitHub date: 2020-05-15T14:26:00-07:00 summary: bpo-40636: Clarify the zip built-in docstring. (GH-20118) Clarify the zip built-in docstring. This puts much simpler text up front along with an example. As it was, the zip built-in docstring was technically correct. But too technical for the reader who shouldn't _need_ to know about `__next__` and `StopIteration` as most people do not need to understand the internal implementation details of the iterator protocol in their daily life. 
This is a documentation only change, intended to be backported to 3.8; it is only tangentially related to PEP-618 which might offer new behavior options in the future. Wording based a bit more on enumerate per Brandt's suggestion. This gets rid of the legacy wording paragraph which seems too tied to implementation details of the iterator protocol which isn't relevant here. Co-authored-by: Brandt Bucher files: M Lib/test/test_doctest.py M Python/bltinmodule.c diff --git a/Lib/test/test_doctest.py b/Lib/test/test_doctest.py index 16196fed39247..3efe5dafc20ad 100644 --- a/Lib/test/test_doctest.py +++ b/Lib/test/test_doctest.py @@ -669,7 +669,7 @@ def non_Python_modules(): r""" True >>> real_tests = [t for t in tests if len(t.examples) > 0] >>> len(real_tests) # objects that actually have doctests - 12 + 13 >>> for t in real_tests: ... print('{} {}'.format(len(t.examples), t.name)) ... @@ -685,6 +685,7 @@ def non_Python_modules(): r""" 2 builtins.int.bit_length 5 builtins.memoryview.hex 1 builtins.oct + 1 builtins.zip Note here that 'bin', 'oct', and 'hex' are functions; 'float.as_integer_ratio', 'float.hex', and 'int.bit_length' are methods; 'float.fromhex' is a classmethod, diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c index 82ca317c7e9c2..199b09c4d8c41 100644 --- a/Python/bltinmodule.c +++ b/Python/bltinmodule.c @@ -2649,12 +2649,15 @@ static PyMethodDef zip_methods[] = { }; PyDoc_STRVAR(zip_doc, -"zip(*iterables) --> zip object\n\ +"zip(*iterables) --> A zip object yielding tuples until an input is exhausted.\n\ \n\ -Return a zip object whose .__next__() method returns a tuple where\n\ -the i-th element comes from the i-th iterable argument. 
The .__next__()\n\ -method continues until the shortest iterable in the argument sequence\n\ -is exhausted and then it raises StopIteration."); + >>> list(zip('abcdefg', range(3), range(4)))\n\ + [('a', 0, 0), ('b', 1, 1), ('c', 2, 2)]\n\ +\n\ +The zip object yields n-length tuples, where n is the number of iterables\n\ +passed as positional arguments to zip(). The i-th element in every tuple\n\ +comes from the i-th iterable argument to zip(). This continues until the\n\ +shortest argument is exhausted."); PyTypeObject PyZip_Type = { PyVarObject_HEAD_INIT(&PyType_Type, 0) From webhook-mailer at python.org Fri May 15 17:28:27 2020 From: webhook-mailer at python.org (Shantanu) Date: Fri, 15 May 2020 21:28:27 -0000 Subject: [Python-checkins] bpo-40445: Update compileall.compile_dir docs (GH-19806) Message-ID: https://github.com/python/cpython/commit/a2b3cdd661a4b6c6c74adbfcb6ac1865bfd2a011 commit: a2b3cdd661a4b6c6c74adbfcb6ac1865bfd2a011 branch: master author: Shantanu committer: GitHub date: 2020-05-15T14:28:23-07:00 summary: bpo-40445: Update compileall.compile_dir docs (GH-19806) files: M Doc/library/compileall.rst diff --git a/Doc/library/compileall.rst b/Doc/library/compileall.rst index a511c7eda265b..01ab7461e9b1c 100644 --- a/Doc/library/compileall.rst +++ b/Doc/library/compileall.rst @@ -155,7 +155,7 @@ Public functions and a false value otherwise. The *maxlevels* parameter is used to limit the depth of the recursion; it - defaults to ``10``. + defaults to ``sys.getrecursionlimit()``. If *ddir* is given, it is prepended to the path to each file being compiled for use in compilation time tracebacks, and is also compiled in to the @@ -228,6 +228,7 @@ Public functions .. versionchanged:: 3.9 Added *stripdir*, *prependdir*, *limit_sl_dest* and *hardlink_dupes* arguments. + Default value of *maxlevels* was changed from ``10`` to ``sys.getrecursionlimit()`` .. 
function:: compile_file(fullname, ddir=None, force=False, rx=None, quiet=0, legacy=False, optimize=-1, invalidation_mode=None, \*, stripdir=None, prependdir=None, limit_sl_dest=None, hardlink_dupes=False) From webhook-mailer at python.org Fri May 15 17:43:34 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 15 May 2020 21:43:34 -0000 Subject: [Python-checkins] bpo-40636: Clarify the zip built-in docstring. (GH-20118) Message-ID: https://github.com/python/cpython/commit/c3d025a86a60348f19551bd9921304c5db322531 commit: c3d025a86a60348f19551bd9921304c5db322531 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-15T14:43:27-07:00 summary: bpo-40636: Clarify the zip built-in docstring. (GH-20118) Clarify the zip built-in docstring. This puts much simpler text up front along with an example. As it was, the zip built-in docstring was technically correct. But too technical for the reader who shouldn't _need_ to know about `__next__` and `StopIteration` as most people do not need to understand the internal implementation details of the iterator protocol in their daily life. This is a documentation only change, intended to be backported to 3.8; it is only tangentially related to PEP-618 which might offer new behavior options in the future. Wording based a bit more on enumerate per Brandt's suggestion. This gets rid of the legacy wording paragraph which seems too tied to implementation details of the iterator protocol which isn't relevant here. Co-authored-by: Brandt Bucher (cherry picked from commit 6a5d3ff67644af42b1a781be2eacb2e82913441c) Co-authored-by: Gregory P. 
Smith files: M Lib/test/test_doctest.py M Python/bltinmodule.c diff --git a/Lib/test/test_doctest.py b/Lib/test/test_doctest.py index 502b90e7ed21c..ad30a051b59c6 100644 --- a/Lib/test/test_doctest.py +++ b/Lib/test/test_doctest.py @@ -669,7 +669,7 @@ def non_Python_modules(): r""" True >>> real_tests = [t for t in tests if len(t.examples) > 0] >>> len(real_tests) # objects that actually have doctests - 12 + 13 >>> for t in real_tests: ... print('{} {}'.format(len(t.examples), t.name)) ... @@ -685,6 +685,7 @@ def non_Python_modules(): r""" 2 builtins.int.bit_length 5 builtins.memoryview.hex 1 builtins.oct + 1 builtins.zip Note here that 'bin', 'oct', and 'hex' are functions; 'float.as_integer_ratio', 'float.hex', and 'int.bit_length' are methods; 'float.fromhex' is a classmethod, diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c index fe22bbdde4e91..e42d5f246c37a 100644 --- a/Python/bltinmodule.c +++ b/Python/bltinmodule.c @@ -2648,12 +2648,15 @@ static PyMethodDef zip_methods[] = { }; PyDoc_STRVAR(zip_doc, -"zip(*iterables) --> zip object\n\ +"zip(*iterables) --> A zip object yielding tuples until an input is exhausted.\n\ \n\ -Return a zip object whose .__next__() method returns a tuple where\n\ -the i-th element comes from the i-th iterable argument. The .__next__()\n\ -method continues until the shortest iterable in the argument sequence\n\ -is exhausted and then it raises StopIteration."); + >>> list(zip('abcdefg', range(3), range(4)))\n\ + [('a', 0, 0), ('b', 1, 1), ('c', 2, 2)]\n\ +\n\ +The zip object yields n-length tuples, where n is the number of iterables\n\ +passed as positional arguments to zip(). The i-th element in every tuple\n\ +comes from the i-th iterable argument to zip(). 
This continues until the\n\ +shortest argument is exhausted."); PyTypeObject PyZip_Type = { PyVarObject_HEAD_INIT(&PyType_Type, 0) From webhook-mailer at python.org Fri May 15 17:54:59 2020 From: webhook-mailer at python.org (Christian Heimes) Date: Fri, 15 May 2020 21:54:59 -0000 Subject: [Python-checkins] bpo-40637: Add option to disable builtin hashes (GH-20121) Message-ID: https://github.com/python/cpython/commit/9b60e55db2897acc30d6b9ef1dbc49674eed40c7 commit: 9b60e55db2897acc30d6b9ef1dbc49674eed40c7 branch: master author: Christian Heimes committer: GitHub date: 2020-05-15T14:54:53-07:00 summary: bpo-40637: Add option to disable builtin hashes (GH-20121) Signed-off-by: Christian Heimes Automerge-Triggered-By: @tiran files: A Misc/NEWS.d/next/Library/2020-05-15-21-57-10.bpo-40637.lb3Bnp.rst M Doc/whatsnew/3.9.rst M configure M configure.ac M pyconfig.h.in M setup.py diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index fbad0fba20f4b..c721a167440c3 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -314,6 +314,15 @@ Added a new function :func:`gc.is_finalized` to check if an object has been finalized by the garbage collector. (Contributed by Pablo Galindo in :issue:`39322`.) +hashlib +------- + +Builtin hash modules can now be disabled with +``./configure --without-builtin-hashlib-hashes`` or selectively enabled with +e.g. ``./configure --with-builtin-hashlib-hashes=sha3,blake2`` to force use +of OpenSSL based implementation. 
+(Contributed by Christian Heimes in :issue:`40479`) + http ---- diff --git a/Misc/NEWS.d/next/Library/2020-05-15-21-57-10.bpo-40637.lb3Bnp.rst b/Misc/NEWS.d/next/Library/2020-05-15-21-57-10.bpo-40637.lb3Bnp.rst new file mode 100644 index 0000000000000..d05e57d86b6ec --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-15-21-57-10.bpo-40637.lb3Bnp.rst @@ -0,0 +1,2 @@ +Builtin hash modules can now be disabled or selectively enabled with +``configure --with-builtin-hashlib-hashes=sha3,blake1`` or ``--without-builtin-hashlib-hashes``. diff --git a/configure b/configure index 26e9aa9fe454e..64bcde6bfdfb9 100755 --- a/configure +++ b/configure @@ -845,6 +845,7 @@ with_computed_gotos with_ensurepip with_openssl with_ssl_default_suites +with_builtin_hashlib_hashes with_experimental_isolated_subinterpreters ' ac_precious_vars='build_alias @@ -1576,6 +1577,9 @@ Optional Packages: leave OpenSSL's defaults untouched, STRING: use a custom string, PROTOCOL_SSLv2 ignores the setting, see Doc/library/ssl.rst + --with-builtin-hashlib-hashes=md5,sha1,sha256,sha512,sha3,blake2 + builtin hash modules, md5, sha1, sha256, sha512, + sha3 (with shake), blake2 --with-experimental-isolated-subinterpreters better isolate subinterpreters, experimental build mode (default is no) @@ -17493,6 +17497,44 @@ $as_echo "#define PY_SSL_DEFAULT_CIPHERS 1" >>confdefs.h fi +# builtin hash modules +default_hashlib_hashes="md5,sha1,sha256,sha512,sha3,blake2" + +$as_echo "#define PY_BUILTIN_HASHLIB_HASHES /**/" >>confdefs.h + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-builtin-hashlib-hashes" >&5 +$as_echo_n "checking for --with-builtin-hashlib-hashes... " >&6; } + +# Check whether --with-builtin-hashlib-hashes was given. 
+if test "${with_builtin_hashlib_hashes+set}" = set; then : + withval=$with_builtin_hashlib_hashes; +case "$withval" in + yes) + withval=$default_hashlib_hashes + ;; + no) + withval="" + ;; +esac +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $withval" >&5 +$as_echo "$withval" >&6; } +cat >>confdefs.h <<_ACEOF +#define PY_BUILTIN_HASHLIB_HASHES "$withval" +_ACEOF + + +else + +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $default_hashlib_hashes" >&5 +$as_echo "$default_hashlib_hashes" >&6; }; +cat >>confdefs.h <<_ACEOF +#define PY_BUILTIN_HASHLIB_HASHES "$default_hashlib_hashes" +_ACEOF + + +fi + + # --with-experimental-isolated-subinterpreters { $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-experimental-isolated-subinterpreters" >&5 diff --git a/configure.ac b/configure.ac index acb6d4bfa8da1..21c47b56358b1 100644 --- a/configure.ac +++ b/configure.ac @@ -5717,6 +5717,32 @@ AC_MSG_RESULT(python) AC_DEFINE(PY_SSL_DEFAULT_CIPHERS, 1) ]) +# builtin hash modules +default_hashlib_hashes="md5,sha1,sha256,sha512,sha3,blake2" +AC_DEFINE([PY_BUILTIN_HASHLIB_HASHES], [], [enabled builtin hash modules] +) +AC_MSG_CHECKING(for --with-builtin-hashlib-hashes) +AC_ARG_WITH(builtin-hashlib-hashes, + AS_HELP_STRING([--with-builtin-hashlib-hashes=md5,sha1,sha256,sha512,sha3,blake2], + [builtin hash modules, + md5, sha1, sha256, sha512, sha3 (with shake), blake2]), +[ +case "$withval" in + yes) + withval=$default_hashlib_hashes + ;; + no) + withval="" + ;; +esac +AC_MSG_RESULT($withval) +AC_DEFINE_UNQUOTED(PY_BUILTIN_HASHLIB_HASHES, "$withval") +], +[ +AC_MSG_RESULT($default_hashlib_hashes); +AC_DEFINE_UNQUOTED(PY_BUILTIN_HASHLIB_HASHES, "$default_hashlib_hashes") +]) + # --with-experimental-isolated-subinterpreters AH_TEMPLATE(EXPERIMENTAL_ISOLATED_SUBINTERPRETERS, [Better isolate subinterpreters, experimental build mode.]) diff --git a/pyconfig.h.in b/pyconfig.h.in index c06c4958726c0..bc906a869b623 100644 --- a/pyconfig.h.in +++ b/pyconfig.h.in @@ -1385,6 
+1385,9 @@ /* Define as the preferred size in bits of long digits */ #undef PYLONG_BITS_IN_DIGIT +/* enabled builtin hash modules */ +#undef PY_BUILTIN_HASHLIB_HASHES + /* Define if you want to coerce the C locale to a UTF-8 based locale */ #undef PY_COERCE_C_LOCALE diff --git a/setup.py b/setup.py index 878372154d411..794ba2f766237 100644 --- a/setup.py +++ b/setup.py @@ -327,6 +327,7 @@ def __init__(self, dist): self.failed = [] self.failed_on_import = [] self.missing = [] + self.disabled_configure = [] if '-j' in os.environ.get('MAKEFLAGS', ''): self.parallel = True @@ -483,6 +484,14 @@ def print_three_column(lst): print_three_column([ext.name for ext in mods_disabled]) print() + if self.disabled_configure: + print() + print("The following modules found by detect_modules() in" + " setup.py have not") + print("been built, they are *disabled* by configure:") + print_three_column(self.disabled_configure) + print() + if self.failed: failed = self.failed[:] print() @@ -2295,36 +2304,73 @@ def split_var(name, sep): libraries=openssl_libs)) def detect_hash_builtins(self): - # We always compile these even when OpenSSL is available (issue #14693). - # It's harmless and the object code is tiny (40-50 KiB per module, - # only loaded when actually used). 
- self.add(Extension('_sha256', ['sha256module.c'], - extra_compile_args=['-DPy_BUILD_CORE_MODULE'], - depends=['hashlib.h'])) - self.add(Extension('_sha512', ['sha512module.c'], - extra_compile_args=['-DPy_BUILD_CORE_MODULE'], - depends=['hashlib.h'])) - self.add(Extension('_md5', ['md5module.c'], - depends=['hashlib.h'])) - self.add(Extension('_sha1', ['sha1module.c'], - depends=['hashlib.h'])) - - blake2_deps = glob(os.path.join(self.srcdir, - 'Modules/_blake2/impl/*')) - blake2_deps.append('hashlib.h') - - self.add(Extension('_blake2', - ['_blake2/blake2module.c', - '_blake2/blake2b_impl.c', - '_blake2/blake2s_impl.c'], - depends=blake2_deps)) - - sha3_deps = glob(os.path.join(self.srcdir, - 'Modules/_sha3/kcp/*')) - sha3_deps.append('hashlib.h') - self.add(Extension('_sha3', - ['_sha3/sha3module.c'], - depends=sha3_deps)) + # By default we always compile these even when OpenSSL is available + # (issue #14693). It's harmless and the object code is tiny + # (40-50 KiB per module, only loaded when actually used). Modules can + # be disabled via the --with-builtin-hashlib-hashes configure flag. 
+ supported = {"md5", "sha1", "sha256", "sha512", "sha3", "blake2"} + + configured = sysconfig.get_config_var("PY_BUILTIN_HASHLIB_HASHES") + configured = configured.strip('"').lower() + configured = { + m.strip() for m in configured.split(",") + } + + self.disabled_configure.extend( + sorted(supported.difference(configured)) + ) + + if "sha256" in configured: + self.add(Extension( + '_sha256', ['sha256module.c'], + extra_compile_args=['-DPy_BUILD_CORE_MODULE'], + depends=['hashlib.h'] + )) + + if "sha512" in configured: + self.add(Extension( + '_sha512', ['sha512module.c'], + extra_compile_args=['-DPy_BUILD_CORE_MODULE'], + depends=['hashlib.h'] + )) + + if "md5" in configured: + self.add(Extension( + '_md5', ['md5module.c'], + depends=['hashlib.h'] + )) + + if "sha1" in configured: + self.add(Extension( + '_sha1', ['sha1module.c'], + depends=['hashlib.h'] + )) + + if "blake2" in configured: + blake2_deps = glob( + os.path.join(self.srcdir, 'Modules/_blake2/impl/*') + ) + blake2_deps.append('hashlib.h') + self.add(Extension( + '_blake2', + [ + '_blake2/blake2module.c', + '_blake2/blake2b_impl.c', + '_blake2/blake2s_impl.c' + ], + depends=blake2_deps + )) + + if "sha3" in configured: + sha3_deps = glob( + os.path.join(self.srcdir, 'Modules/_sha3/kcp/*') + ) + sha3_deps.append('hashlib.h') + self.add(Extension( + '_sha3', + ['_sha3/sha3module.c'], + depends=sha3_deps + )) def detect_nis(self): if MS_WINDOWS or CYGWIN or HOST_PLATFORM == 'qnx6': From webhook-mailer at python.org Fri May 15 18:19:45 2020 From: webhook-mailer at python.org (Chris Jerdonek) Date: Fri, 15 May 2020 22:19:45 -0000 Subject: [Python-checkins] Add Andrew York to ACKS for GH-19622. (GH-20105) Message-ID: https://github.com/python/cpython/commit/fe1176e882393b6d3e6a6cfa5ca23657f0b3b4a9 commit: fe1176e882393b6d3e6a6cfa5ca23657f0b3b4a9 branch: master author: Chris Jerdonek committer: GitHub date: 2020-05-15T15:19:39-07:00 summary: Add Andrew York to ACKS for GH-19622. 
(GH-20105) This updates ACKS for commit 003708bcf8f2c58d4b65f68318acf164d713e008 contributed by Andrew York. files: M Misc/ACKS diff --git a/Misc/ACKS b/Misc/ACKS index fad920b0510ad..a9345e097741f 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -1906,6 +1906,7 @@ EungJun Yi Bob Yodlowski Danny Yoo Wonsup Yoon +Andrew York Rory Yorke George Yoshida Kazuhiro Yoshida From webhook-mailer at python.org Fri May 15 19:55:58 2020 From: webhook-mailer at python.org (Chris Jerdonek) Date: Fri, 15 May 2020 23:55:58 -0000 Subject: [Python-checkins] bpo-31033: Add a msg argument to Future.cancel() and Task.cancel() (GH-19979) Message-ID: https://github.com/python/cpython/commit/1ce5841eca6d96b1b1e8c213d04f2e92b1619bb5 commit: 1ce5841eca6d96b1b1e8c213d04f2e92b1619bb5 branch: master author: Chris Jerdonek committer: GitHub date: 2020-05-15T16:55:50-07:00 summary: bpo-31033: Add a msg argument to Future.cancel() and Task.cancel() (GH-19979) files: A Misc/NEWS.d/next/Library/2020-05-07-06-41-20.bpo-31033.waCj3n.rst M Doc/library/asyncio-future.rst M Doc/library/asyncio-task.rst M Lib/asyncio/futures.py M Lib/asyncio/tasks.py M Lib/asyncio/windows_events.py M Lib/test/test_asyncio/test_futures.py M Lib/test/test_asyncio/test_tasks.py M Modules/_asynciomodule.c M Modules/clinic/_asynciomodule.c.h diff --git a/Doc/library/asyncio-future.rst b/Doc/library/asyncio-future.rst index 832d58119b7b0..e1ac18eaf0988 100644 --- a/Doc/library/asyncio-future.rst +++ b/Doc/library/asyncio-future.rst @@ -170,7 +170,7 @@ Future Object Returns the number of callbacks removed, which is typically 1, unless a callback was added more than once. - .. method:: cancel() + .. method:: cancel(msg=None) Cancel the Future and schedule callbacks. @@ -178,6 +178,9 @@ Future Object Otherwise, change the Future's state to *cancelled*, schedule the callbacks, and return ``True``. + .. versionchanged:: 3.9 + Added the ``msg`` parameter. + .. method:: exception() Return the exception that was set on this Future. 
@@ -255,3 +258,6 @@ the Future has a result:: - asyncio Future is not compatible with the :func:`concurrent.futures.wait` and :func:`concurrent.futures.as_completed` functions. + + - :meth:`asyncio.Future.cancel` accepts an optional ``msg`` argument, + but :func:`concurrent.futures.cancel` does not. diff --git a/Doc/library/asyncio-task.rst b/Doc/library/asyncio-task.rst index bc8a2722bcca7..2e963398d9300 100644 --- a/Doc/library/asyncio-task.rst +++ b/Doc/library/asyncio-task.rst @@ -724,7 +724,7 @@ Task Object .. deprecated-removed:: 3.8 3.10 The *loop* parameter. - .. method:: cancel() + .. method:: cancel(msg=None) Request the Task to be cancelled. @@ -739,6 +739,9 @@ Task Object suppressing cancellation completely is not common and is actively discouraged. + .. versionchanged:: 3.9 + Added the ``msg`` parameter. + .. _asyncio_example_task_cancel: The following example illustrates how coroutines can intercept diff --git a/Lib/asyncio/futures.py b/Lib/asyncio/futures.py index a3cf379ee8170..889f3e6eb86b0 100644 --- a/Lib/asyncio/futures.py +++ b/Lib/asyncio/futures.py @@ -51,6 +51,7 @@ class Future: _exception = None _loop = None _source_traceback = None + _cancel_message = None # This field is used for a dual purpose: # - Its presence is a marker to declare that a class implements @@ -123,7 +124,7 @@ def get_loop(self): raise RuntimeError("Future object is not initialized.") return loop - def cancel(self): + def cancel(self, msg=None): """Cancel the future and schedule callbacks. If the future is already done or cancelled, return False. Otherwise, @@ -134,6 +135,7 @@ def cancel(self): if self._state != _PENDING: return False self._state = _CANCELLED + self._cancel_message = msg self.__schedule_callbacks() return True @@ -173,7 +175,9 @@ def result(self): the future is done and has an exception set, this exception is raised. 
""" if self._state == _CANCELLED: - raise exceptions.CancelledError + raise exceptions.CancelledError( + '' if self._cancel_message is None else self._cancel_message) + if self._state != _FINISHED: raise exceptions.InvalidStateError('Result is not ready.') self.__log_traceback = False @@ -190,7 +194,8 @@ def exception(self): InvalidStateError. """ if self._state == _CANCELLED: - raise exceptions.CancelledError + raise exceptions.CancelledError( + '' if self._cancel_message is None else self._cancel_message) if self._state != _FINISHED: raise exceptions.InvalidStateError('Exception is not set.') self.__log_traceback = False diff --git a/Lib/asyncio/tasks.py b/Lib/asyncio/tasks.py index f5de1a2eea99f..a3a0a33ee03da 100644 --- a/Lib/asyncio/tasks.py +++ b/Lib/asyncio/tasks.py @@ -230,7 +230,7 @@ def print_stack(self, *, limit=None, file=None): """ return base_tasks._task_print_stack(self, limit, file) - def cancel(self): + def cancel(self, msg=None): """Request that this task cancel itself. This arranges for a CancelledError to be thrown into the @@ -254,13 +254,14 @@ def cancel(self): if self.done(): return False if self._fut_waiter is not None: - if self._fut_waiter.cancel(): + if self._fut_waiter.cancel(msg=msg): # Leave self._fut_waiter; it may be a Task that # catches and ignores the cancellation so we may have # to cancel it again later. return True # It must be the case that self.__step is already scheduled. 
self._must_cancel = True + self._cancel_message = msg return True def __step(self, exc=None): @@ -269,7 +270,8 @@ def __step(self, exc=None): f'_step(): already done: {self!r}, {exc!r}') if self._must_cancel: if not isinstance(exc, exceptions.CancelledError): - exc = exceptions.CancelledError() + exc = exceptions.CancelledError('' + if self._cancel_message is None else self._cancel_message) self._must_cancel = False coro = self._coro self._fut_waiter = None @@ -287,11 +289,15 @@ def __step(self, exc=None): if self._must_cancel: # Task is cancelled right before coro stops. self._must_cancel = False - super().cancel() + super().cancel(msg=self._cancel_message) else: super().set_result(exc.value) - except exceptions.CancelledError: - super().cancel() # I.e., Future.cancel(self). + except exceptions.CancelledError as exc: + if exc.args: + cancel_msg = exc.args[0] + else: + cancel_msg = None + super().cancel(msg=cancel_msg) # I.e., Future.cancel(self). except (KeyboardInterrupt, SystemExit) as exc: super().set_exception(exc) raise @@ -319,7 +325,8 @@ def __step(self, exc=None): self.__wakeup, context=self._context) self._fut_waiter = result if self._must_cancel: - if self._fut_waiter.cancel(): + if self._fut_waiter.cancel( + msg=self._cancel_message): self._must_cancel = False else: new_exc = RuntimeError( @@ -716,12 +723,12 @@ def __init__(self, children, *, loop=None): self._children = children self._cancel_requested = False - def cancel(self): + def cancel(self, msg=None): if self.done(): return False ret = False for child in self._children: - if child.cancel(): + if child.cancel(msg=msg): ret = True if ret: # If any child tasks were actually cancelled, we should @@ -780,7 +787,8 @@ def _done_callback(fut): # Check if 'fut' is cancelled first, as # 'fut.exception()' will *raise* a CancelledError # instead of returning it. 
- exc = exceptions.CancelledError() + exc = exceptions.CancelledError('' + if fut._cancel_message is None else fut._cancel_message) outer.set_exception(exc) return else: @@ -799,7 +807,9 @@ def _done_callback(fut): # Check if 'fut' is cancelled first, as # 'fut.exception()' will *raise* a CancelledError # instead of returning it. - res = exceptions.CancelledError() + res = exceptions.CancelledError( + '' if fut._cancel_message is None else + fut._cancel_message) else: res = fut.exception() if res is None: @@ -810,7 +820,9 @@ def _done_callback(fut): # If gather is being cancelled we must propagate the # cancellation regardless of *return_exceptions* argument. # See issue 32684. - outer.set_exception(exceptions.CancelledError()) + exc = exceptions.CancelledError('' + if fut._cancel_message is None else fut._cancel_message) + outer.set_exception(exc) else: outer.set_result(results) diff --git a/Lib/asyncio/windows_events.py b/Lib/asyncio/windows_events.py index ac51109ff1a83..c07fe3241c569 100644 --- a/Lib/asyncio/windows_events.py +++ b/Lib/asyncio/windows_events.py @@ -75,9 +75,9 @@ def _cancel_overlapped(self): self._loop.call_exception_handler(context) self._ov = None - def cancel(self): + def cancel(self, msg=None): self._cancel_overlapped() - return super().cancel() + return super().cancel(msg=msg) def set_exception(self, exception): super().set_exception(exception) @@ -149,9 +149,9 @@ def _unregister_wait(self): self._unregister_wait_cb(None) - def cancel(self): + def cancel(self, msg=None): self._unregister_wait() - return super().cancel() + return super().cancel(msg=msg) def set_exception(self, exception): self._unregister_wait() diff --git a/Lib/test/test_asyncio/test_futures.py b/Lib/test/test_asyncio/test_futures.py index ee5edd5bd311f..ec00896cc620b 100644 --- a/Lib/test/test_asyncio/test_futures.py +++ b/Lib/test/test_asyncio/test_futures.py @@ -201,6 +201,27 @@ def test_uninitialized(self): self.assertFalse(fut.cancelled()) self.assertFalse(fut.done()) 
+ def test_future_cancel_message_getter(self): + f = self._new_future(loop=self.loop) + self.assertTrue(hasattr(f, '_cancel_message')) + self.assertEqual(f._cancel_message, None) + + f.cancel('my message') + with self.assertRaises(asyncio.CancelledError): + self.loop.run_until_complete(f) + self.assertEqual(f._cancel_message, 'my message') + + def test_future_cancel_message_setter(self): + f = self._new_future(loop=self.loop) + f.cancel('my message') + f._cancel_message = 'my new message' + self.assertEqual(f._cancel_message, 'my new message') + + # Also check that the value is used for cancel(). + with self.assertRaises(asyncio.CancelledError): + self.loop.run_until_complete(f) + self.assertEqual(f._cancel_message, 'my new message') + def test_cancel(self): f = self._new_future(loop=self.loop) self.assertTrue(f.cancel()) diff --git a/Lib/test/test_asyncio/test_tasks.py b/Lib/test/test_asyncio/test_tasks.py index 0f8d921c5bc7f..65bee526d2eca 100644 --- a/Lib/test/test_asyncio/test_tasks.py +++ b/Lib/test/test_asyncio/test_tasks.py @@ -103,6 +103,31 @@ def setUp(self): self.loop.set_task_factory(self.new_task) self.loop.create_future = lambda: self.new_future(self.loop) + def test_task_cancel_message_getter(self): + async def coro(): + pass + t = self.new_task(self.loop, coro()) + self.assertTrue(hasattr(t, '_cancel_message')) + self.assertEqual(t._cancel_message, None) + + t.cancel('my message') + with self.assertRaises(asyncio.CancelledError): + self.loop.run_until_complete(t) + self.assertEqual(t._cancel_message, 'my message') + + def test_task_cancel_message_setter(self): + async def coro(): + pass + t = self.new_task(self.loop, coro()) + t.cancel('my message') + t._cancel_message = 'my new message' + self.assertEqual(t._cancel_message, 'my new message') + + # Also check that the value is used for cancel(). 
+ with self.assertRaises(asyncio.CancelledError): + self.loop.run_until_complete(t) + self.assertEqual(t._cancel_message, 'my new message') + def test_task_del_collect(self): class Evil: def __del__(self): @@ -520,6 +545,86 @@ async def task(): self.assertTrue(t.cancelled()) self.assertFalse(t.cancel()) + def test_cancel_with_message_then_future_result(self): + # Test Future.result() after calling cancel() with a message. + cases = [ + ((), ('',)), + ((None,), ('',)), + (('my message',), ('my message',)), + # Non-string values should roundtrip. + ((5,), (5,)), + ] + for cancel_args, expected_args in cases: + with self.subTest(cancel_args=cancel_args): + loop = asyncio.new_event_loop() + self.set_event_loop(loop) + + async def sleep(): + await asyncio.sleep(10) + + async def coro(): + task = self.new_task(loop, sleep()) + await asyncio.sleep(0) + task.cancel(*cancel_args) + done, pending = await asyncio.wait([task]) + task.result() + + task = self.new_task(loop, coro()) + with self.assertRaises(asyncio.CancelledError) as cm: + loop.run_until_complete(task) + exc = cm.exception + self.assertEqual(exc.args, expected_args) + + def test_cancel_with_message_then_future_exception(self): + # Test Future.exception() after calling cancel() with a message. + cases = [ + ((), ('',)), + ((None,), ('',)), + (('my message',), ('my message',)), + # Non-string values should roundtrip. 
+ ((5,), (5,)), + ] + for cancel_args, expected_args in cases: + with self.subTest(cancel_args=cancel_args): + loop = asyncio.new_event_loop() + self.set_event_loop(loop) + + async def sleep(): + await asyncio.sleep(10) + + async def coro(): + task = self.new_task(loop, sleep()) + await asyncio.sleep(0) + task.cancel(*cancel_args) + done, pending = await asyncio.wait([task]) + task.exception() + + task = self.new_task(loop, coro()) + with self.assertRaises(asyncio.CancelledError) as cm: + loop.run_until_complete(task) + exc = cm.exception + self.assertEqual(exc.args, expected_args) + + def test_cancel_with_message_before_starting_task(self): + loop = asyncio.new_event_loop() + self.set_event_loop(loop) + + async def sleep(): + await asyncio.sleep(10) + + async def coro(): + task = self.new_task(loop, sleep()) + # We deliberately leave out the sleep here. + task.cancel('my message') + done, pending = await asyncio.wait([task]) + task.exception() + + task = self.new_task(loop, coro()) + with self.assertRaises(asyncio.CancelledError) as cm: + loop.run_until_complete(task) + exc = cm.exception + self.assertEqual(exc.args, ('my message',)) + def test_cancel_yield(self): with self.assertWarns(DeprecationWarning): @asyncio.coroutine @@ -2285,31 +2390,42 @@ def cancelling_callback(_): self.assertEqual(gather_task.result(), [42]) def test_cancel_gather_2(self): - loop = asyncio.new_event_loop() - self.addCleanup(loop.close) - - async def test(): - time = 0 - while True: - time += 0.05 - await asyncio.gather(asyncio.sleep(0.05), - return_exceptions=True, - loop=loop) - if time > 1: - return - - async def main(): - qwe = self.new_task(loop, test()) - await asyncio.sleep(0.2) - qwe.cancel() - try: - await qwe - except asyncio.CancelledError: - pass - else: - self.fail('gather did not propagate the cancellation request') - - loop.run_until_complete(main()) + cases = [ + ((), ('',)), + ((None,), ('',)), + (('my message',), ('my message',)), + # Non-string values should 
roundtrip. + ((5,), (5,)), + ] + for cancel_args, expected_args in cases: + with self.subTest(cancel_args=cancel_args): + + loop = asyncio.new_event_loop() + self.addCleanup(loop.close) + + async def test(): + time = 0 + while True: + time += 0.05 + await asyncio.gather(asyncio.sleep(0.05), + return_exceptions=True, + loop=loop) + if time > 1: + return + + async def main(): + qwe = self.new_task(loop, test()) + await asyncio.sleep(0.2) + qwe.cancel(*cancel_args) + try: + await qwe + except asyncio.CancelledError as exc: + self.assertEqual(exc.args, expected_args) + else: + self.fail('gather did not propagate the cancellation ' + 'request') + + loop.run_until_complete(main()) def test_exception_traceback(self): # See http://bugs.python.org/issue28843 diff --git a/Misc/NEWS.d/next/Library/2020-05-07-06-41-20.bpo-31033.waCj3n.rst b/Misc/NEWS.d/next/Library/2020-05-07-06-41-20.bpo-31033.waCj3n.rst new file mode 100644 index 0000000000000..e3d35a04aab51 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-07-06-41-20.bpo-31033.waCj3n.rst @@ -0,0 +1 @@ +Add a ``msg`` argument to :meth:`Future.cancel` and :meth:`Task.cancel`. 
diff --git a/Modules/_asynciomodule.c b/Modules/_asynciomodule.c index cc211a8895a8e..ff1b2b8b909c7 100644 --- a/Modules/_asynciomodule.c +++ b/Modules/_asynciomodule.c @@ -67,6 +67,7 @@ typedef enum { PyObject *prefix##_exception; \ PyObject *prefix##_result; \ PyObject *prefix##_source_tb; \ + PyObject *prefix##_cancel_msg; \ fut_state prefix##_state; \ int prefix##_log_tb; \ int prefix##_blocking; \ @@ -480,6 +481,7 @@ future_init(FutureObj *fut, PyObject *loop) Py_CLEAR(fut->fut_result); Py_CLEAR(fut->fut_exception); Py_CLEAR(fut->fut_source_tb); + Py_CLEAR(fut->fut_cancel_msg); fut->fut_state = STATE_PENDING; fut->fut_log_tb = 0; @@ -594,11 +596,33 @@ future_set_exception(FutureObj *fut, PyObject *exc) Py_RETURN_NONE; } +static PyObject * +create_cancelled_error(PyObject *msg) +{ + PyObject *exc; + if (msg == NULL || msg == Py_None) { + msg = PyUnicode_FromString(""); + exc = PyObject_CallOneArg(asyncio_CancelledError, msg); + Py_DECREF(msg); + } else { + exc = PyObject_CallOneArg(asyncio_CancelledError, msg); + } + return exc; +} + +static void +set_cancelled_error(PyObject *msg) +{ + PyObject *exc = create_cancelled_error(msg); + PyErr_SetObject(asyncio_CancelledError, exc); + Py_DECREF(exc); +} + static int future_get_result(FutureObj *fut, PyObject **result) { if (fut->fut_state == STATE_CANCELLED) { - PyErr_SetNone(asyncio_CancelledError); + set_cancelled_error(fut->fut_cancel_msg); return -1; } @@ -695,7 +719,7 @@ future_add_done_callback(FutureObj *fut, PyObject *arg, PyObject *ctx) } static PyObject * -future_cancel(FutureObj *fut) +future_cancel(FutureObj *fut, PyObject *msg) { fut->fut_log_tb = 0; @@ -704,6 +728,9 @@ future_cancel(FutureObj *fut) } fut->fut_state = STATE_CANCELLED; + Py_XINCREF(msg); + Py_XSETREF(fut->fut_cancel_msg, msg); + if (future_schedule_callbacks(fut) == -1) { return NULL; } @@ -749,6 +776,7 @@ FutureObj_clear(FutureObj *fut) Py_CLEAR(fut->fut_result); Py_CLEAR(fut->fut_exception); Py_CLEAR(fut->fut_source_tb); + 
Py_CLEAR(fut->fut_cancel_msg); Py_CLEAR(fut->dict); return 0; } @@ -763,6 +791,7 @@ FutureObj_traverse(FutureObj *fut, visitproc visit, void *arg) Py_VISIT(fut->fut_result); Py_VISIT(fut->fut_exception); Py_VISIT(fut->fut_source_tb); + Py_VISIT(fut->fut_cancel_msg); Py_VISIT(fut->dict); return 0; } @@ -828,7 +857,7 @@ _asyncio_Future_exception_impl(FutureObj *self) } if (self->fut_state == STATE_CANCELLED) { - PyErr_SetNone(asyncio_CancelledError); + set_cancelled_error(self->fut_cancel_msg); return NULL; } @@ -1029,6 +1058,8 @@ _asyncio_Future_remove_done_callback(FutureObj *self, PyObject *fn) /*[clinic input] _asyncio.Future.cancel + msg: object = None + Cancel the future and schedule callbacks. If the future is already done or cancelled, return False. Otherwise, @@ -1037,11 +1068,11 @@ return True. [clinic start generated code]*/ static PyObject * -_asyncio_Future_cancel_impl(FutureObj *self) -/*[clinic end generated code: output=e45b932ba8bd68a1 input=515709a127995109]*/ +_asyncio_Future_cancel_impl(FutureObj *self, PyObject *msg) +/*[clinic end generated code: output=3edebbc668e5aba3 input=925eb545251f2c5a]*/ { ENSURE_FUTURE_ALIVE(self) - return future_cancel(self); + return future_cancel(self, msg); } /*[clinic input] @@ -1254,6 +1285,29 @@ FutureObj_get_source_traceback(FutureObj *fut, void *Py_UNUSED(ignored)) return fut->fut_source_tb; } +static PyObject * +FutureObj_get_cancel_message(FutureObj *fut, void *Py_UNUSED(ignored)) +{ + if (fut->fut_cancel_msg == NULL) { + Py_RETURN_NONE; + } + Py_INCREF(fut->fut_cancel_msg); + return fut->fut_cancel_msg; +} + +static int +FutureObj_set_cancel_message(FutureObj *fut, PyObject *msg, + void *Py_UNUSED(ignored)) +{ + if (msg == NULL) { + PyErr_SetString(PyExc_AttributeError, "cannot delete attribute"); + return -1; + } + Py_INCREF(msg); + Py_XSETREF(fut->fut_cancel_msg, msg); + return 0; +} + static PyObject * FutureObj_get_state(FutureObj *fut, void *Py_UNUSED(ignored)) { @@ -1422,7 +1476,10 @@ static 
PyMethodDef FutureType_methods[] = { {"_exception", (getter)FutureObj_get_exception, NULL, NULL}, \ {"_log_traceback", (getter)FutureObj_get_log_traceback, \ (setter)FutureObj_set_log_traceback, NULL}, \ - {"_source_traceback", (getter)FutureObj_get_source_traceback, NULL, NULL}, + {"_source_traceback", (getter)FutureObj_get_source_traceback, \ + NULL, NULL}, \ + {"_cancel_message", (getter)FutureObj_get_cancel_message, \ + (setter)FutureObj_set_cancel_message, NULL}, static PyGetSetDef FutureType_getsetlist[] = { FUTURE_COMMON_GETSETLIST @@ -2189,6 +2246,8 @@ _asyncio_Task__repr_info_impl(TaskObj *self) /*[clinic input] _asyncio.Task.cancel + msg: object = None + Request that this task cancel itself. This arranges for a CancelledError to be thrown into the @@ -2210,8 +2269,8 @@ was not called). [clinic start generated code]*/ static PyObject * -_asyncio_Task_cancel_impl(TaskObj *self) -/*[clinic end generated code: output=6bfc0479da9d5757 input=13f9bf496695cb52]*/ +_asyncio_Task_cancel_impl(TaskObj *self, PyObject *msg) +/*[clinic end generated code: output=c66b60d41c74f9f1 input=f4ff8e8ffc5f1c00]*/ { self->task_log_tb = 0; @@ -2223,7 +2282,8 @@ _asyncio_Task_cancel_impl(TaskObj *self) PyObject *res; int is_true; - res = _PyObject_CallMethodIdNoArgs(self->task_fut_waiter, &PyId_cancel); + res = _PyObject_CallMethodIdOneArg(self->task_fut_waiter, + &PyId_cancel, msg); if (res == NULL) { return NULL; } @@ -2240,6 +2300,8 @@ _asyncio_Task_cancel_impl(TaskObj *self) } self->task_must_cancel = 1; + Py_XINCREF(msg); + Py_XSETREF(self->task_cancel_msg, msg); Py_RETURN_TRUE; } @@ -2623,7 +2685,8 @@ task_step_impl(TaskObj *task, PyObject *exc) if (!exc) { /* exc was not a CancelledError */ - exc = PyObject_CallNoArgs(asyncio_CancelledError); + exc = create_cancelled_error(task->task_cancel_msg); + if (!exc) { goto fail; } @@ -2672,7 +2735,7 @@ task_step_impl(TaskObj *task, PyObject *exc) if (task->task_must_cancel) { // Task is cancelled right before coro stops. 
task->task_must_cancel = 0; - res = future_cancel((FutureObj*)task); + res = future_cancel((FutureObj*)task, task->task_cancel_msg); } else { res = future_set_result((FutureObj*)task, o); @@ -2689,8 +2752,26 @@ task_step_impl(TaskObj *task, PyObject *exc) if (PyErr_ExceptionMatches(asyncio_CancelledError)) { /* CancelledError */ - PyErr_Clear(); - return future_cancel((FutureObj*)task); + PyErr_Fetch(&et, &ev, &tb); + + PyObject *cancel_msg; + if (ev != NULL && PyExceptionInstance_Check(ev)) { + PyObject *exc_args = ((PyBaseExceptionObject*)ev)->args; + Py_ssize_t size = PyTuple_GET_SIZE(exc_args); + if (size > 0) { + cancel_msg = PyTuple_GET_ITEM(exc_args, 0); + } else { + cancel_msg = NULL; + } + } else { + cancel_msg = ev; + } + + Py_DECREF(et); + Py_XDECREF(tb); + Py_XDECREF(ev); + + return future_cancel((FutureObj*)task, cancel_msg); } /* Some other exception; pop it and call Task.set_exception() */ @@ -2770,7 +2851,8 @@ task_step_impl(TaskObj *task, PyObject *exc) if (task->task_must_cancel) { PyObject *r; int is_true; - r = _PyObject_CallMethodIdNoArgs(result, &PyId_cancel); + r = _PyObject_CallMethodIdOneArg(result, &PyId_cancel, + task->task_cancel_msg); if (r == NULL) { return NULL; } @@ -2861,7 +2943,8 @@ task_step_impl(TaskObj *task, PyObject *exc) if (task->task_must_cancel) { PyObject *r; int is_true; - r = _PyObject_CallMethodIdNoArgs(result, &PyId_cancel); + r = _PyObject_CallMethodIdOneArg(result, &PyId_cancel, + task->task_cancel_msg); if (r == NULL) { return NULL; } diff --git a/Modules/clinic/_asynciomodule.c.h b/Modules/clinic/_asynciomodule.c.h index 17eb77334d0a7..3f5023c33a580 100644 --- a/Modules/clinic/_asynciomodule.c.h +++ b/Modules/clinic/_asynciomodule.c.h @@ -174,7 +174,7 @@ PyDoc_STRVAR(_asyncio_Future_remove_done_callback__doc__, {"remove_done_callback", (PyCFunction)_asyncio_Future_remove_done_callback, METH_O, _asyncio_Future_remove_done_callback__doc__}, PyDoc_STRVAR(_asyncio_Future_cancel__doc__, -"cancel($self, /)\n" 
+"cancel($self, /, msg=None)\n" "--\n" "\n" "Cancel the future and schedule callbacks.\n" @@ -184,15 +184,34 @@ PyDoc_STRVAR(_asyncio_Future_cancel__doc__, "return True."); #define _ASYNCIO_FUTURE_CANCEL_METHODDEF \ - {"cancel", (PyCFunction)_asyncio_Future_cancel, METH_NOARGS, _asyncio_Future_cancel__doc__}, + {"cancel", (PyCFunction)(void(*)(void))_asyncio_Future_cancel, METH_FASTCALL|METH_KEYWORDS, _asyncio_Future_cancel__doc__}, static PyObject * -_asyncio_Future_cancel_impl(FutureObj *self); +_asyncio_Future_cancel_impl(FutureObj *self, PyObject *msg); static PyObject * -_asyncio_Future_cancel(FutureObj *self, PyObject *Py_UNUSED(ignored)) +_asyncio_Future_cancel(FutureObj *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { - return _asyncio_Future_cancel_impl(self); + PyObject *return_value = NULL; + static const char * const _keywords[] = {"msg", NULL}; + static _PyArg_Parser _parser = {NULL, _keywords, "cancel", 0}; + PyObject *argsbuf[1]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 0; + PyObject *msg = Py_None; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf); + if (!args) { + goto exit; + } + if (!noptargs) { + goto skip_optional_pos; + } + msg = args[0]; +skip_optional_pos: + return_value = _asyncio_Future_cancel_impl(self, msg); + +exit: + return return_value; } PyDoc_STRVAR(_asyncio_Future_cancelled__doc__, @@ -413,7 +432,7 @@ _asyncio_Task__repr_info(TaskObj *self, PyObject *Py_UNUSED(ignored)) } PyDoc_STRVAR(_asyncio_Task_cancel__doc__, -"cancel($self, /)\n" +"cancel($self, /, msg=None)\n" "--\n" "\n" "Request that this task cancel itself.\n" @@ -436,15 +455,34 @@ PyDoc_STRVAR(_asyncio_Task_cancel__doc__, "was not called)."); #define _ASYNCIO_TASK_CANCEL_METHODDEF \ - {"cancel", (PyCFunction)_asyncio_Task_cancel, METH_NOARGS, _asyncio_Task_cancel__doc__}, + {"cancel", (PyCFunction)(void(*)(void))_asyncio_Task_cancel, METH_FASTCALL|METH_KEYWORDS, _asyncio_Task_cancel__doc__}, static PyObject * -_asyncio_Task_cancel_impl(TaskObj *self); +_asyncio_Task_cancel_impl(TaskObj *self, PyObject *msg); static PyObject * -_asyncio_Task_cancel(TaskObj *self, PyObject *Py_UNUSED(ignored)) +_asyncio_Task_cancel(TaskObj *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { - return _asyncio_Task_cancel_impl(self); + PyObject *return_value = NULL; + static const char * const _keywords[] = {"msg", NULL}; + static _PyArg_Parser _parser = {NULL, _keywords, "cancel", 0}; + PyObject *argsbuf[1]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 0; + PyObject *msg = Py_None; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf); + if (!args) { + goto exit; + } + if (!noptargs) { + goto skip_optional_pos; + } + msg = args[0]; +skip_optional_pos: + return_value = _asyncio_Task_cancel_impl(self, msg); + +exit: + return return_value; } PyDoc_STRVAR(_asyncio_Task_get_stack__doc__, @@ -832,4 +870,4 @@ _asyncio__leave_task(PyObject *module, PyObject *const *args, Py_ssize_t nargs, exit: return return_value; } -/*[clinic end generated code: output=585ba1f8de5b4103 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=6ed4cfda8fc516ad input=a9049054013a1b77]*/ From webhook-mailer at python.org Fri May 15 21:28:02 2020 From: webhook-mailer at python.org (Zackery Spytz) Date: Sat, 16 May 2020 01:28:02 -0000 Subject: [Python-checkins] bpo-39075: types.SimpleNamespace no longer sorts attributes in its repr (GH-19430) Message-ID: https://github.com/python/cpython/commit/6b6092f533f0e4787b8564c4fad6ec6d1018af0d commit: 6b6092f533f0e4787b8564c4fad6ec6d1018af0d branch: master author: Zackery Spytz committer: GitHub date: 2020-05-15T18:27:54-07:00 summary: bpo-39075: types.SimpleNamespace no longer sorts attributes in its repr (GH-19430) files: A Misc/NEWS.d/next/Library/2020-04-07-23-44-06.bpo-39075.hgck3j.rst M Doc/library/types.rst M Lib/test/test_types.py M Objects/namespaceobject.c diff --git a/Doc/library/types.rst b/Doc/library/types.rst index cdddb46783a47..79acdf4499afd 100644 --- a/Doc/library/types.rst +++ b/Doc/library/types.rst @@ -355,8 +355,7 @@ Additional Utility Classes and Functions self.__dict__.update(kwargs) def __repr__(self): - keys = sorted(self.__dict__) - items = ("{}={!r}".format(k, self.__dict__[k]) for k in keys) + items = (f"{k}={v!r}" for k, v in self.__dict__.items()) return "{}({})".format(type(self).__name__, ", ".join(items)) def __eq__(self, other): @@ -368,6 +367,9 @@ Additional Utility Classes and Functions .. 
versionadded:: 3.3 + .. versionchanged:: 3.9 + Attribute order in the repr changed from alphabetical to insertion (like + ``dict``). .. function:: DynamicClassAttribute(fget=None, fset=None, fdel=None, doc=None) diff --git a/Lib/test/test_types.py b/Lib/test/test_types.py index 28ebfb6e603e3..49dc5bf40e3ed 100644 --- a/Lib/test/test_types.py +++ b/Lib/test/test_types.py @@ -1262,8 +1262,8 @@ def test_repr(self): ns2._y = 5 name = "namespace" - self.assertEqual(repr(ns1), "{name}(w=3, x=1, y=2)".format(name=name)) - self.assertEqual(repr(ns2), "{name}(_y=5, x='spam')".format(name=name)) + self.assertEqual(repr(ns1), "{name}(x=1, y=2, w=3)".format(name=name)) + self.assertEqual(repr(ns2), "{name}(x='spam', _y=5)".format(name=name)) def test_equal(self): ns1 = types.SimpleNamespace(x=1) @@ -1312,7 +1312,7 @@ def test_recursive_repr(self): ns3.spam = ns2 name = "namespace" repr1 = "{name}(c='cookie', spam={name}(...))".format(name=name) - repr2 = "{name}(spam={name}(spam={name}(...), x=1))".format(name=name) + repr2 = "{name}(spam={name}(x=1, spam={name}(...)))".format(name=name) self.assertEqual(repr(ns1), repr1) self.assertEqual(repr(ns2), repr2) diff --git a/Misc/NEWS.d/next/Library/2020-04-07-23-44-06.bpo-39075.hgck3j.rst b/Misc/NEWS.d/next/Library/2020-04-07-23-44-06.bpo-39075.hgck3j.rst new file mode 100644 index 0000000000000..c447a191f07f3 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-04-07-23-44-06.bpo-39075.hgck3j.rst @@ -0,0 +1,2 @@ +The repr for :class:`types.SimpleNamespace` is now insertion ordered rather +than alphabetical. 
diff --git a/Objects/namespaceobject.c b/Objects/namespaceobject.c index 29141a81d71ec..fa37ed250d30a 100644 --- a/Objects/namespaceobject.c +++ b/Objects/namespaceobject.c @@ -91,8 +91,6 @@ namespace_repr(PyObject *ns) keys = PyDict_Keys(d); if (keys == NULL) goto error; - if (PyList_Sort(keys) != 0) - goto error; keys_iter = PyObject_GetIter(keys); if (keys_iter == NULL) From webhook-mailer at python.org Fri May 15 21:33:09 2020 From: webhook-mailer at python.org (Christian Heimes) Date: Sat, 16 May 2020 01:33:09 -0000 Subject: [Python-checkins] bpo-40457: Support OpenSSL without TLS 1.0/1.1 (GH-19862) Message-ID: https://github.com/python/cpython/commit/6e8cda91d92da72800d891b2fc2073ecbc134d98 commit: 6e8cda91d92da72800d891b2fc2073ecbc134d98 branch: master author: Christian Heimes committer: GitHub date: 2020-05-15T18:33:05-07:00 summary: bpo-40457: Support OpenSSL without TLS 1.0/1.1 (GH-19862) OpenSSL can be build without support for TLS 1.0 and 1.1. The ssl module now correctly adheres to OPENSSL_NO_TLS1 and OPENSSL_NO_TLS1_1 flags. Also update multissltest to test with latest OpenSSL and LibreSSL releases. Signed-off-by: Christian Heimes Automerge-Triggered-By: @tiran files: A Misc/NEWS.d/next/Library/2020-05-02-17-17-37.bpo-40457.EXReI1.rst M Modules/_ssl.c M Tools/ssl/multissltests.py diff --git a/Misc/NEWS.d/next/Library/2020-05-02-17-17-37.bpo-40457.EXReI1.rst b/Misc/NEWS.d/next/Library/2020-05-02-17-17-37.bpo-40457.EXReI1.rst new file mode 100644 index 0000000000000..19b6dd685cd8c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-02-17-17-37.bpo-40457.EXReI1.rst @@ -0,0 +1 @@ +The ssl module now support OpenSSL builds without TLS 1.0 and 1.1 methods. 
diff --git a/Modules/_ssl.c b/Modules/_ssl.c index 987a99178775d..5fe65a8a1d6df 100644 --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -147,15 +147,6 @@ static void _PySSLFixErrno(void) { # define PY_OPENSSL_1_1_API 1 #endif -/* Openssl comes with TLSv1.1 and TLSv1.2 between 1.0.0h and 1.0.1 - http://www.openssl.org/news/changelog.html - */ -#if OPENSSL_VERSION_NUMBER >= 0x10001000L -# define HAVE_TLSv1_2 1 -#else -# define HAVE_TLSv1_2 0 -#endif - /* SNI support (client- and server-side) appeared in OpenSSL 1.0.0 and 0.9.8f * This includes the SSL_set_SSL_CTX() function. */ @@ -326,13 +317,9 @@ enum py_ssl_version { PY_SSL_VERSION_SSL2, PY_SSL_VERSION_SSL3=1, PY_SSL_VERSION_TLS, /* SSLv23 */ -#if HAVE_TLSv1_2 PY_SSL_VERSION_TLS1, PY_SSL_VERSION_TLS1_1, PY_SSL_VERSION_TLS1_2, -#else - PY_SSL_VERSION_TLS1, -#endif PY_SSL_VERSION_TLS_CLIENT=0x10, PY_SSL_VERSION_TLS_SERVER, }; @@ -3086,35 +3073,45 @@ _ssl__SSLContext_impl(PyTypeObject *type, int proto_version) #endif PySSL_BEGIN_ALLOW_THREADS - if (proto_version == PY_SSL_VERSION_TLS1) + switch(proto_version) { +#if defined(SSL3_VERSION) && !defined(OPENSSL_NO_SSL3) + case PY_SSL_VERSION_SSL3: + ctx = SSL_CTX_new(SSLv3_method()); + break; +#endif +#if defined(TLS1_VERSION) && !defined(OPENSSL_NO_TLS1) + case PY_SSL_VERSION_TLS1: ctx = SSL_CTX_new(TLSv1_method()); -#if HAVE_TLSv1_2 - else if (proto_version == PY_SSL_VERSION_TLS1_1) - ctx = SSL_CTX_new(TLSv1_1_method()); - else if (proto_version == PY_SSL_VERSION_TLS1_2) - ctx = SSL_CTX_new(TLSv1_2_method()); + break; #endif -#ifndef OPENSSL_NO_SSL3 - else if (proto_version == PY_SSL_VERSION_SSL3) - ctx = SSL_CTX_new(SSLv3_method()); +#if defined(TLS1_1_VERSION) && !defined(OPENSSL_NO_TLS1_1) + case PY_SSL_VERSION_TLS1_1: + ctx = SSL_CTX_new(TLSv1_1_method()); + break; #endif -#ifndef OPENSSL_NO_SSL2 - else if (proto_version == PY_SSL_VERSION_SSL2) - ctx = SSL_CTX_new(SSLv2_method()); +#if defined(TLS1_2_VERSION) && !defined(OPENSSL_NO_TLS1_2) + case PY_SSL_VERSION_TLS1_2: 
+ ctx = SSL_CTX_new(TLSv1_2_method()); + break; #endif - else if (proto_version == PY_SSL_VERSION_TLS) /* SSLv23 */ + case PY_SSL_VERSION_TLS: + /* SSLv23 */ ctx = SSL_CTX_new(TLS_method()); - else if (proto_version == PY_SSL_VERSION_TLS_CLIENT) + break; + case PY_SSL_VERSION_TLS_CLIENT: ctx = SSL_CTX_new(TLS_client_method()); - else if (proto_version == PY_SSL_VERSION_TLS_SERVER) + break; + case PY_SSL_VERSION_TLS_SERVER: ctx = SSL_CTX_new(TLS_server_method()); - else + break; + default: proto_version = -1; + } PySSL_END_ALLOW_THREADS if (proto_version == -1) { PyErr_SetString(PyExc_ValueError, - "invalid protocol version"); + "invalid or unsupported protocol version"); return NULL; } if (ctx == NULL) { @@ -6185,12 +6182,10 @@ PyInit__ssl(void) PY_SSL_VERSION_TLS_SERVER); PyModule_AddIntConstant(m, "PROTOCOL_TLSv1", PY_SSL_VERSION_TLS1); -#if HAVE_TLSv1_2 PyModule_AddIntConstant(m, "PROTOCOL_TLSv1_1", PY_SSL_VERSION_TLS1_1); PyModule_AddIntConstant(m, "PROTOCOL_TLSv1_2", PY_SSL_VERSION_TLS1_2); -#endif /* protocol options */ PyModule_AddIntConstant(m, "OP_ALL", @@ -6198,10 +6193,8 @@ PyInit__ssl(void) PyModule_AddIntConstant(m, "OP_NO_SSLv2", SSL_OP_NO_SSLv2); PyModule_AddIntConstant(m, "OP_NO_SSLv3", SSL_OP_NO_SSLv3); PyModule_AddIntConstant(m, "OP_NO_TLSv1", SSL_OP_NO_TLSv1); -#if HAVE_TLSv1_2 PyModule_AddIntConstant(m, "OP_NO_TLSv1_1", SSL_OP_NO_TLSv1_1); PyModule_AddIntConstant(m, "OP_NO_TLSv1_2", SSL_OP_NO_TLSv1_2); -#endif #ifdef SSL_OP_NO_TLSv1_3 PyModule_AddIntConstant(m, "OP_NO_TLSv1_3", SSL_OP_NO_TLSv1_3); #else diff --git a/Tools/ssl/multissltests.py b/Tools/ssl/multissltests.py index 0e37ec1bba93b..12af98d12c45d 100755 --- a/Tools/ssl/multissltests.py +++ b/Tools/ssl/multissltests.py @@ -43,20 +43,21 @@ log = logging.getLogger("multissl") OPENSSL_OLD_VERSIONS = [ + "1.0.2u", + "1.1.0l", ] OPENSSL_RECENT_VERSIONS = [ - "1.0.2u", - "1.1.0l", "1.1.1g", # "3.0.0-alpha2" ] LIBRESSL_OLD_VERSIONS = [ + "2.9.2", ] LIBRESSL_RECENT_VERSIONS = [ - "2.9.2", + 
"3.1.0", ] # store files in ../multissl @@ -80,7 +81,7 @@ parser.add_argument( '--disable-ancient', action='store_true', - help="Don't test OpenSSL < 1.0.2 and LibreSSL < 2.5.3.", + help="Don't test OpenSSL and LibreSSL versions without upstream support", ) parser.add_argument( '--openssl', From webhook-mailer at python.org Sat May 16 04:20:11 2020 From: webhook-mailer at python.org (Paul Ganssle) Date: Sat, 16 May 2020 08:20:11 -0000 Subject: [Python-checkins] bpo-40503: PEP 615: Tests and implementation for zoneinfo (GH-19909) Message-ID: https://github.com/python/cpython/commit/62972d9d73e83d6eea157617cc69500ffec9e3f0 commit: 62972d9d73e83d6eea157617cc69500ffec9e3f0 branch: master author: Paul Ganssle committer: GitHub date: 2020-05-16T10:20:06+02:00 summary: bpo-40503: PEP 615: Tests and implementation for zoneinfo (GH-19909) This is the initial implementation of PEP 615, the zoneinfo module, ported from the standalone reference implementation (see https://www.python.org/dev/peps/pep-0615/#reference-implementation for a link, which has a more detailed commit history). This includes (hopefully) all functional elements described in the PEP, but documentation is found in a separate PR. This includes: 1. A pure python implementation of the ZoneInfo class 2. A C accelerated implementation of the ZoneInfo class 3. Tests with 100% branch coverage for the Python code (though C code coverage is less than 100%). 4. A compile-time configuration option on Linux (though not on Windows) Differences from the reference implementation: - The module is arranged slightly differently: the accelerated module is `_zoneinfo` rather than `zoneinfo._czoneinfo`, which also necessitates some changes in the test support function. (Suggested by Victor Stinner and Steve Dower.) - The tests are arranged slightly differently and do not include the property tests. 
The tests live at test/test_zoneinfo/test_zoneinfo.py rather than test/test_zoneinfo.py or test/test_zoneinfo/__init__.py because we may do some refactoring in the future that would likely require this separation anyway; we may: - include the property tests - automatically run all the tests against both pure Python and C, rather than manually constructing C and Python test classes (similar to the way this works with test_datetime.py, which generates C and Python test cases from datetimetester.py). - This includes a compile-time configuration option on Linux (though not on Windows); added with much help from Thomas Wouters. - Integration into the CPython build system is obviously different from building a standalone zoneinfo module wheel. - This includes configuration to install the tzdata package as part of CI, though only on the coverage jobs. Introducing a PyPI dependency as part of the CI build was controversial, and this is seen as less of a major change, since the coverage jobs already depend on pip and PyPI. Additional changes that were introduced as part of this PR, most / all of which were backported to the reference implementation: - Fixed reference and memory leaks With much debugging help from Pablo Galindo - Added smoke tests ensuring that the C and Python modules are built The import machinery can be somewhat fragile, and the "seamlessly falls back to pure Python" nature of this module makes it so that a problem building the C extension or a failure to import the pure Python version might easily go unnoticed. - Adjustments to zoneinfo.__dir__ Suggested by Petr Viktorin. - Slight refactorings as suggested by Steve Dower. - Removed unnecessary if check on std_abbr Discovered this because of a missing line in branch coverage. 
files: A Lib/test/test_zoneinfo/__init__.py A Lib/test/test_zoneinfo/__main__.py A Lib/test/test_zoneinfo/_support.py A Lib/test/test_zoneinfo/data/update_test_data.py A Lib/test/test_zoneinfo/data/zoneinfo_data.json A Lib/test/test_zoneinfo/test_zoneinfo.py A Lib/zoneinfo/__init__.py A Lib/zoneinfo/_common.py A Lib/zoneinfo/_tzpath.py A Lib/zoneinfo/_zoneinfo.py A Misc/requirements-test.txt A Modules/_zoneinfo.c A PCbuild/_zoneinfo.vcxproj A PCbuild/_zoneinfo.vcxproj.filters M .github/workflows/coverage.yml M .travis.yml M Lib/sysconfig.py M Makefile.pre.in M Modules/Setup M PCbuild/lib.pyproj M PCbuild/pcbuild.proj M PCbuild/pcbuild.sln M PCbuild/readme.txt M Tools/msi/lib/lib_files.wxs M configure M configure.ac M setup.py diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index 75bdf83f6c5db..6dd973bf8e4ad 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -48,6 +48,7 @@ jobs: ./python -m venv .venv source ./.venv/bin/activate python -m pip install -U coverage + python -m pip install -r Misc/requirements-test.txt python -m test.pythoninfo - name: 'Tests with coverage' run: > diff --git a/.travis.yml b/.travis.yml index 3c2fb4bdc7875..133385fbf5c71 100644 --- a/.travis.yml +++ b/.travis.yml @@ -87,6 +87,7 @@ matrix: # Need a venv that can parse covered code. - ./python -m venv venv - ./venv/bin/python -m pip install -U coverage + - ./venv/bin/python -m pip install -r Misc/requirements-test.txt - ./venv/bin/python -m test.pythoninfo script: # Skip tests that re-run the entire test suite. 
diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py index eaee837f10e33..bf04ac541e6b0 100644 --- a/Lib/sysconfig.py +++ b/Lib/sysconfig.py @@ -546,6 +546,7 @@ def get_config_vars(*args): if os.name == 'nt': _init_non_posix(_CONFIG_VARS) + _CONFIG_VARS['TZPATH'] = '' if os.name == 'posix': _init_posix(_CONFIG_VARS) # For backward compatibility, see issue19555 diff --git a/Lib/test/test_zoneinfo/__init__.py b/Lib/test/test_zoneinfo/__init__.py new file mode 100644 index 0000000000000..98cc4412ae16c --- /dev/null +++ b/Lib/test/test_zoneinfo/__init__.py @@ -0,0 +1 @@ +from .test_zoneinfo import * diff --git a/Lib/test/test_zoneinfo/__main__.py b/Lib/test/test_zoneinfo/__main__.py new file mode 100644 index 0000000000000..5cc4e055d5e66 --- /dev/null +++ b/Lib/test/test_zoneinfo/__main__.py @@ -0,0 +1,3 @@ +import unittest + +unittest.main('test.test_zoneinfo') diff --git a/Lib/test/test_zoneinfo/_support.py b/Lib/test/test_zoneinfo/_support.py new file mode 100644 index 0000000000000..6bd8d8dc0fbfe --- /dev/null +++ b/Lib/test/test_zoneinfo/_support.py @@ -0,0 +1,76 @@ +import contextlib +import functools +import sys +import threading +import unittest +from test.support import import_fresh_module + +OS_ENV_LOCK = threading.Lock() +TZPATH_LOCK = threading.Lock() +TZPATH_TEST_LOCK = threading.Lock() + + +def call_once(f): + """Decorator that ensures a function is only ever called once.""" + lock = threading.Lock() + cached = functools.lru_cache(None)(f) + + @functools.wraps(f) + def inner(): + with lock: + return cached() + + return inner + + + at call_once +def get_modules(): + """Retrieve two copies of zoneinfo: pure Python and C accelerated. + + Because this function manipulates the import system in a way that might + be fragile or do unexpected things if it is run many times, it uses a + `call_once` decorator to ensure that this is only ever called exactly + one time ? 
in other words, when using this function you will only ever + get one copy of each module rather than a fresh import each time. + """ + import zoneinfo as c_module + + py_module = import_fresh_module("zoneinfo", blocked=["_zoneinfo"]) + + return py_module, c_module + + + at contextlib.contextmanager +def set_zoneinfo_module(module): + """Make sure sys.modules["zoneinfo"] refers to `module`. + + This is necessary because `pickle` will refuse to serialize + an type calling itself `zoneinfo.ZoneInfo` unless `zoneinfo.ZoneInfo` + refers to the same object. + """ + + NOT_PRESENT = object() + old_zoneinfo = sys.modules.get("zoneinfo", NOT_PRESENT) + sys.modules["zoneinfo"] = module + yield + if old_zoneinfo is not NOT_PRESENT: + sys.modules["zoneinfo"] = old_zoneinfo + else: # pragma: nocover + sys.modules.pop("zoneinfo") + + +class ZoneInfoTestBase(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.klass = cls.module.ZoneInfo + super().setUpClass() + + @contextlib.contextmanager + def tzpath_context(self, tzpath, lock=TZPATH_LOCK): + with lock: + old_path = self.module.TZPATH + try: + self.module.reset_tzpath(tzpath) + yield + finally: + self.module.reset_tzpath(old_path) diff --git a/Lib/test/test_zoneinfo/data/update_test_data.py b/Lib/test/test_zoneinfo/data/update_test_data.py new file mode 100644 index 0000000000000..f531ab316a1f2 --- /dev/null +++ b/Lib/test/test_zoneinfo/data/update_test_data.py @@ -0,0 +1,122 @@ +""" +Script to automatically generate a JSON file containing time zone information. + +This is done to allow "pinning" a small subset of the tzdata in the tests, +since we are testing properties of a file that may be subject to change. For +example, the behavior in the far future of any given zone is likely to change, +but "does this give the right answer for this file in 2040" is still an +important property to test. + +This must be run from a computer with zoneinfo data installed. 
+""" +from __future__ import annotations + +import base64 +import functools +import json +import lzma +import pathlib +import textwrap +import typing + +import zoneinfo + +KEYS = [ + "Africa/Abidjan", + "Africa/Casablanca", + "America/Los_Angeles", + "America/Santiago", + "Asia/Tokyo", + "Australia/Sydney", + "Europe/Dublin", + "Europe/Lisbon", + "Europe/London", + "Pacific/Kiritimati", + "UTC", +] + +TEST_DATA_LOC = pathlib.Path(__file__).parent + + + at functools.lru_cache(maxsize=None) +def get_zoneinfo_path() -> pathlib.Path: + """Get the first zoneinfo directory on TZPATH containing the "UTC" zone.""" + key = "UTC" + for path in map(pathlib.Path, zoneinfo.TZPATH): + if (path / key).exists(): + return path + else: + raise OSError("Cannot find time zone data.") + + +def get_zoneinfo_metadata() -> typing.Dict[str, str]: + path = get_zoneinfo_path() + + tzdata_zi = path / "tzdata.zi" + if not tzdata_zi.exists(): + # tzdata.zi is necessary to get the version information + raise OSError("Time zone data does not include tzdata.zi.") + + with open(tzdata_zi, "r") as f: + version_line = next(f) + + _, version = version_line.strip().rsplit(" ", 1) + + if ( + not version[0:4].isdigit() + or len(version) < 5 + or not version[4:].isalpha() + ): + raise ValueError( + "Version string should be YYYYx, " + + "where YYYY is the year and x is a letter; " + + f"found: {version}" + ) + + return {"version": version} + + +def get_zoneinfo(key: str) -> bytes: + path = get_zoneinfo_path() + + with open(path / key, "rb") as f: + return f.read() + + +def encode_compressed(data: bytes) -> typing.List[str]: + compressed_zone = lzma.compress(data) + raw = base64.b85encode(compressed_zone) + + raw_data_str = raw.decode("utf-8") + + data_str = textwrap.wrap(raw_data_str, width=70) + return data_str + + +def load_compressed_keys() -> typing.Dict[str, typing.List[str]]: + output = {key: encode_compressed(get_zoneinfo(key)) for key in KEYS} + + return output + + +def update_test_data(fname: str 
= "zoneinfo_data.json") -> None: + TEST_DATA_LOC.mkdir(exist_ok=True, parents=True) + + # Annotation required: https://github.com/python/mypy/issues/8772 + json_kwargs: typing.Dict[str, typing.Any] = dict( + indent=2, sort_keys=True, + ) + + compressed_keys = load_compressed_keys() + metadata = get_zoneinfo_metadata() + output = { + "metadata": metadata, + "data": compressed_keys, + } + + with open(TEST_DATA_LOC / fname, "w") as f: + json.dump(output, f, **json_kwargs) + + +if __name__ == "__main__": + update_test_data() diff --git a/Lib/test/test_zoneinfo/data/zoneinfo_data.json b/Lib/test/test_zoneinfo/data/zoneinfo_data.json new file mode 100644 index 0000000000000..ec4414a0cdedb --- /dev/null +++ b/Lib/test/test_zoneinfo/data/zoneinfo_data.json @@ -0,0 +1,190 @@ +{ + "data": { + "Africa/Abidjan": [ + "{Wp48S^xk9=GL at E0stWa761SMbT8$j-~f{VGF<>F7KxBg5R*{Ksocg8-YYVul=v7vZzaHN", + "uC=da5UI2rH18c!OnjV{y4u(+A!!VBKmY&$ORw>7UO^(500B;v0RR91bXh%WvBYQl0ssI2", + "00dcD" + ], + "Africa/Casablanca": [ + "{Wp48S^xk9=GL at E0stWa761SMbT8$j;0b&Kz+C_;7KxBg5R*{N&yjMUR~;C-fDaSOU;q-~", + "FqW+4{YBjbcw}`a!dW>b)R2-0a+uwf`P3{_Y at HuCz}S$J$ZJ>R_V<~|Fk>sgX4=%0vUrh-", + "lt at YP^Wrus;j?`Th#xRPzf<<~Hp4DH^gZX>d{+WOp~HNu8!{uWu}&XphAd{j1;rB4|9?R!", + "pqruAFUMt8#*WcrVS{;kLlY(cJRV$w?d2car%Rs>q9BgTU4", + "Ht-tQKZ7Z`9QqOb?R#b%z?rk>!CkH7jy3wja4NG2q)H}fNRKg8v{);Em;K3Cncf4C6&Oaj", + "V+DbX%o4+)CV3+e!Lm6dutu(0BQpH1T?W(~cQtKV*^_Pdx!LirjpTs?Bmt at vktjLq4;)O!", + "rrly=c*rwTwMJFd0I57`hgkc?=nyI4RZf9W$6DCWugmf&)wk^tWH17owj=#PGH7Xv-?9$j", + "njwDlkOE+BFNR9YXEmBpO;rqEw=e2IR-8^(W;8ma?M3JVd($2T>IW+0tk|Gm8>ftukRQ9J", + "8k3brzqMnVyjsLI-CKneFa)Lxvp_aq40f}0J3VVoWL5rox", + "`Kptivcp}o5xA^@>qNI%?zo=Yj4AMV?kbAA)j(1%)+Pp)bSn+7Yk`M{oE}L-Z!G6OMr5G+h", + "p)$3Lg{ono{4cN>Vr&>L4kXH;_VnBL5U!LgzqE%P7QQ*tue}O`3(TZ0`aKn&~8trOQ-rBXCp)f at P6RMO4l0+;b|5-pk9_ryNh}Zc*v%mvz_#", + "yd6fjB0g9{MmMnu8bG%#C~ugXK^S^k@?ab#", + "O|aE>dDTt4s4n69(~@t~!wniV%g7khFx~I*4>Y|V$4j5%KPF*-FyKIi@!Ho&", + 
"x8QQsksYt8)D+W)Ni!=G`ogSu^vLL-l#7A7=iIAKL2SuZk9F}NfNk86VI)9WZE?%2wC-ya", + "F~z#Qsq)LH0|_D8^5fU8X%GeQ4TB>R-dlziA&tZe&1ada208!$nk`7bOFO2S00G`Z at 1A~t&lyL{p{eM{5)QGf7Mo5FW9==mlyXJt2", + "UwpntR7H0eSq!(aYq#aqUz&RM*tvuMI)AsM?K3-dV3-TT{t)!Iy#JTo=tXkzAM9~j2YbiO", + "ls3(H8Dc>Y|D1aqL51vjLbpYG;GvGTQB4bXuJ%mA;(B4eUpu$$@zv2vVcq-Y)VKbzp^tei", + "uzy}R{LuvDjpuVb`79O+CBmg{Wx!bvx$eu4zRE&", + "PehMb=&G<9$>iZ|bFE)0=4I?KLFGBC0I(0_svgw0%FiMsT%koo*!nEYc6GY at QnU}&4Isg;", + "l=|khi(!VaiSE2=Ny`&&tpi~~;{$uN}%f|7mBhAy;s3YT^sy!$eG~?`9mNJC9 at 4Bac_p^BZh)Yd_rWW5qh-?tKY(>5VHO", + "L*iT8P at wCavLj^yYbnDR+4ukhS+xPrpl)iqB?u)bj9a2aW==g6G3lCJd>(+Blfr)~^40F4f>cRZ^UF;RibfZ>0m73hR", + "C{$vTfC(STN`g7(B<=Z2556{}0`?p&|Akkst!4Xy4OT;A at c$XTUI3FRRjy*KA7uC56FD)z", + "^X{WV*sr(w!c$W357o!&eLO2wTDNOyw at gf(&R<t;=-Tu1TV{>%8ZVATC9tjD8|(&`$9YHvZ9bVe#>w", + "|8c;Tg|xE&)`*}LwM*E}q}q8^Qja%p`_U)*5DdLI9O@!e=3jFjOCrCq28b_bb;s>%D#iJB", + "CWJi{JH!Js;6nfayos$kq^OEX00HO-lokL0!mqm{vBYQl0ssI200dcD" + ], + "America/Santiago": [ + "{Wp48S^xk9=GL at E0stWa761SMbT8$j;0fRZ<6QtM7KxBg84(fsEAUJ$J{f-TXlPEUec5Ee", + "n+hsD4lC(QYax=JdSpoyje8%VM`GW}{bJ8 at y$A8O&*$pw{(f~Os#}2w", + "eX6^Rgi$IT%n^V^85L>$_c7{cB^#ogV=rHBJGiz-RQNFGK?gdPi|q)j`&8)}KJ{qo6dixa", + "9 at yYyVg+%lo0nO+Tw0-w2hJ%mafyWL)|", + ")?W6Bi%FWuGPA1Dru$XR4SZANsAthU2EoKHF6oEtKq`rwP", + "(VNegnI_NI%;ma$)wj{k!@KFB30Yo)IOrl>)$)D|+(5h&+%2vuwGuy^@S8FT^s21V5};>VA9Iu;?8bHz#r<;JtfZDI1(FT at edh0#", + "MYW$A1qkMGIwTZqqdYNE3gl#zp&NbL9Mp=voqN|;?gqR&4$)1`znddtEyuKS*^nMMD=0^>", + "7^z6-C4P67UWOXuMBubP>j6i~03aR at jD^-Y`JSYu#Yp0P8dLLJ0QOPE8=BoiuRX59YW7xg", + "WiexjHX%&0?`ZQCdxCdL^qd1v at kOjQKaWo2Y1++~LcA%FTq?5o%}fX1-RIvlB)1#iTNomGnUL=nM!>Ix|AGtON7!F1O?53kqlC2o-`ZGw*+s", + "NM$^9znsIJMwlgscE`|O3|;BRgsQMYm~`uv+nvuv`nigRa}X=BX=A5Sw$)WEklF7&c>_~$", + "zJ(m--bqXgiN^w-U=BJH9C0Qro(x90zo at rK;&TJ$nI@&k$ORgOb2s%gWbc}ok_27)Eoku~Fq|B-Ps+4J_", + "HPJMLJ2^_)cOU$p&3kNAlrV!)%~6r$BJ>OOi~=-<6byle{?zd4J{NG}o8tw|+#ZNLcpNwk", + "TuPE~sbJB8_RZb2DopStO+Wwux~F#S59zm%00I98;S&G=b(j+6vBYQl0ssI200dcD" + 
], + "Asia/Tokyo": [ + "{Wp48S^xk9=GL at E0stWa761SMbT8$j-~luMgIxeB7KxBg5R*;y?l4Rl4neXH3cv!OtfK at h", + "KZzauI)S!FSDREPhhBS6Fb$&Vv#7%;?Te|>pF^0HBr&z_Tk<%vMW_QqjevRZOp8XVFgP<8", + "TkT#`9H&0Ua;gT1#rZLV0HqbAKK;_z at nO;6t0L}hOdk<>TdUa07R(LPI6@!GU$ty4=mwqHG-XVe*n(Yvgdlr+FqIU18!osi)48t~eWX8)&L", + "G)Ud^0zz@*AF+2r7E}Nf9Y72K~o-T%}D&z%}#7g2br?oH6ZiYH^%>J3D)TPKV(JY*bwjuw5=DsPB@~CrROZeN", + "x>A*H&CHrWt0`EP`m!F%waepl#|w#&`XgVc?~2M3uw$fGX~tf_Il!q#Aa<*8xlzQ2+7r6Z", + "^;Laa9F(WB_O&Dy2r>~@kSi16W{=6+i5GV=Uq~KX*~&HUN4oz7*O(gXIr}sDVcD`Ikgw#|", + "50ssal8s)Qy;?YGCf;*UKKKN!T4!Kqy_G;7PfQapugqvVBKy12v3TVH^L2", + "0?#5*VP~MOYfe$h`*L!7 at tiW|_^X1N%<}`7YahiUYtMu5XwmOf3?dr+ at zXHwW`z}ZDqZlT", + "<2Cs(<1%M!i6o&VK89BY0J7HPIo;O62s=|IbV^@y$N&#=>i^F00FcHoDl#3", + "Mdv&xvBYQl0ssI200dcD" + ], + "Europe/Dublin": [ + "{Wp48S^xk9=GL at E0stWa761SMbT8$j;0>b$_+0=h7KxBg5R*;&J77#T_U2R5sleVWFDmK~", + "Kzj5oh@`QKHvW^6V{jU-w>qg1tSt0c^vh;?qAqA0%t?;#S~6U8Qi", + "v&f1s9IH#g$m1k1a#3+lylw4mwT4QnEUUQdwg+xnEcBlgu31bAVabn41OMZVLGz6NDwG%X", + "uQar!b>GI{qSahE`AG}$kRWbuI~JCt;38)Xwbb~Qggs55t+MAHIxgDxzTJ;2xXx99+qCy4", + "45kC#v_l8fx|G&jlVvaciR<-wwf22l%4(t at S6tnX39#_K(4S0fu$FUs$isud9IKzCXB78NkARYq at 9Dc0TGkhz);NtM_SSzEffN", + "l{2^*CKGdp52h!52A)6q9fUSltXF{T*Ehc9Q7u8!W7pE(Fv$D$cKUAt6wY=DA1mGgxC*VX", + "q_If3G#FY6-Voj`fIKk`0}Cc72_SD{v>468LV{pyBI33^p0E?}RwDA6Pkq--C~0jF&Z at Pv", + "!dx_1SN_)jwz at P$(oK%P!Tk9?fRjK88yxhxlcFtTjjZ$DYssSsa#ufYrR+}}nKS+r384o~", + "!Uw$nwTbF~qgRsgr0N#d at KIinx%hQB(SJyjJtDtIy(%mDm}ZBGN}dV6K~om|=U", + "VGkbciQ=^$_14|gT21!YQ)@y*Rd0i_lS6gtPBE9+ah%WIJPwzUTjIr+J1XckkmA!6WE16%", + "CVAl{Dn&-)=G$Bjh?bh0$Xt1UDcgXJjXzzojuw0>paV~?Sa`VN3FysqFxTzfKVAu*ucq#+m=|KSSMvp_#@-lwd+q*ue", + "FQ^5<|<0R-u4qYMbRqzSn&", + "Q7jSuvc%b+EZc%>nI(+&0Tl1Y>a6v4`uNFD-7$QrhHgS7Wnv~rDgfH;rQw3+m`LJxoM4v#", + "gK@?|B{RHJ*VxZgk#!p<_&-sjxOda0YaiJ1UnG41VPv(Et%ElzKRMcO$AfgU+Xnwg5p2_+", + "NrnZ1WfEj^fmHd^sx@%JWKkh#zaK0ox%rdP)zUmGZZnqmZ_9L=%6R8ibJH0bOT$AGhDo6{", + "fJ?;_U;D|^>5by2ul at 
i4Zf()InfFN}00EQ=q#FPL>RM>svBYQl0ssI200dcD" + ], + "Europe/Lisbon": [ + "{Wp48S^xk9=GL at E0stWa761SMbT8$j;0=rf*IfWA7KxBg5R*;*X|PN+G3LqthM?xgkNUN_", + ")gCt1Sc%YT6^TTomk4yVHXeyvQj8}l<;q&s7K}#Vnc8lII1?)AHh$*>OKUU4S;*h>v*ep0", + "xTi1cK2{aY*|2D*-~K<;-{_W+r at NvZ7-|NZv($ek_C%VfP0xjWeZP#CPXD`IKkakjh(kUd", + "&H)m;^Q(jGjIyiyrcUMtOP)u3A>sw6ux;Bmp3x$4QvQKMx5TrCx_!$srWQuXNs&`9=^IY1", + "yc&C31!sQh7P=Mk*#6x8Z at 5^%ehR8UW$OWw0KMw}P1ycI^", + "4eh12oBUOV?S>n*d!+EM@>x#9PZD12iD=zaC;7`8dTfkU_6d}OZvSFSbGgXeKw}XyX at D=(", + ")D0!^DBGr8pXWBT$S-yhLP>Z3ys^VW3}RQ6{NGGVJG6vf*MH93vvNW6yLjie1;{4tVhg-KnSf|G`!", + "Z;j$7gJ1ows~RD=@n7I6aFd8rOR_7Y?E-$clI%1o5gA at O!KPa^(8^iFFeFykI-+z>E$mvp", + "E_h`vbHPjqkLs`Dn-0FV`R at z|h!S(Lb;M&|Exr!biY`%bfp$6`hK;GDhdP|^Q", + "*Ty*}1d41K>H2B{jrjE9aFK>yAQJBX9CD%-384S;0fw`PlprHGS`^b$oS-`I4VH7ji8ou-", + "g|060jfb1XcxiInT0oOoeR7#%e5Ug5#KW)nVSRvLHNe$SQHM at 2)`S9L7>RL@Qx%fmm7?3u7P5TywFQ}C at S(pq}|", + "eLPT{C^{<0Q?uU&kSVd%!~8q3;Z0s3OqzF`$HRkePL5Ywgiwn{R(zi+jmOBFrVpW;)@UsU#%$8BcV#h@}m$#!Fglo&bwb78aYqOG_W7h{eb(+39&-mk4EIXq_", + "_`30=8sfA3=!3TO_TyS5X22~?6nKngZ|bq=grdq=9X)3xAkA42L!~rmS)n3w-~;lgz%Fhn", + "(?rXdp2ho~9?wmVs2JwVt~?@FVD%`tN69{(i3oQa;O0$E$lF&~Y#_H6bu6(BiwblJ>;-Fs", + "gA$Y$*?=X)n1pFkKn}F~`>=4)+LLQk?L*P!bhAm0;`N~z3QbUIyVrm%kOZ(n1JJsm0pyb8", + "!GV{d*C!9KXv;4vD4Q>-k#+x(!V5L at w5M>v2V5a`B>t(|B", + "|Fqr4^-{S*%Ep~ojUtx_CRbSQ(uFwu2=KH)Q at EBs@ZqRXn4mU;B!68;;IQs3Ub=n&UU%*m", + "k&zwD36&JSwsN(%k&x?H+tN^6)23c`I0=5^N_R0~1>tsFZ`^`3z~rXSXT&qcwa#n!%+Z#P", + "PG}(D^_CCILXnF|GKwabBh*xFS?4rwGo2vtJUwzrbv_$5PO+`?$l{H-jGB at X%S!OAhw;D4", + "XFycN3!XqQ&EorJOD3>~^U%Luw!jF<;6_q-f-S|6{cQDfZ2(4Xf1MMLr1=SA=MwVf2%Pp%VP;jn)|5Tf!-DbUGn%I-rkYaH7?$$O!t)wwClAisr3eUoeB^~T=U*_P~Y2*KdnO87>B!19sV=xZ5", + "yApq26RxgqA|*tmsvtL#OhcF(C<0EGWHP)BFl?h)_*7!{LoJiv%RsOs!q->n+DcV%9~B at RbC_1G_1g6`Yd~8|%-=2l~oGN!~TVv2Bnk>7wW8L@^?vX$f3AiT)(4nrCuTm9%(XC6Nai", + "E(;}7&=YZagjAN$O-cN;1u{dTkElmB0GT$|Wa)QMmKrx<|LCJ9qlUoFsUbD^H^6_8(w<0{", + "ftj&O1~p_%lh5z;zNV&sP+", + 
"NF2>iK{8KMUf+)<-)VxXbLxD(alL}N$AT-ogNbJSMMYeX+Z{jS)b8TK^PB=FxyBxzfmFto", + "eo0R`a(%NO?#aEH9|?Cv00000NIsFh6BW2800DjO0RR918Pu^`vBYQl0ssI200dcD" + ], + "UTC": [ + "{Wp48S^xk9=GL at E0stWa761SMbT8$j-~e#|9bEt_7KxBg5R*|3h1|xhHLji!C57qW6L*|H", + "pEErm00000ygu;I+>V)?00B92fhY-(AGY&-0RR9100dcD" + ] + }, + "metadata": { + "version": "2020a" + } +} \ No newline at end of file diff --git a/Lib/test/test_zoneinfo/test_zoneinfo.py b/Lib/test/test_zoneinfo/test_zoneinfo.py new file mode 100644 index 0000000000000..05db03abf25e9 --- /dev/null +++ b/Lib/test/test_zoneinfo/test_zoneinfo.py @@ -0,0 +1,1994 @@ +from __future__ import annotations + +import base64 +import contextlib +import dataclasses +import importlib.metadata +import io +import json +import lzma +import os +import pathlib +import pickle +import re +import shutil +import struct +import tempfile +import unittest +from datetime import date, datetime, time, timedelta, timezone + +from . import _support as test_support +from ._support import ( + OS_ENV_LOCK, + TZPATH_LOCK, + TZPATH_TEST_LOCK, + ZoneInfoTestBase, +) + +py_zoneinfo, c_zoneinfo = test_support.get_modules() + +try: + importlib.metadata.metadata("tzdata") + HAS_TZDATA_PKG = True +except importlib.metadata.PackageNotFoundError: + HAS_TZDATA_PKG = False + +ZONEINFO_DATA = None +ZONEINFO_DATA_V1 = None +TEMP_DIR = None +DATA_DIR = pathlib.Path(__file__).parent / "data" +ZONEINFO_JSON = DATA_DIR / "zoneinfo_data.json" + +# Useful constants +ZERO = timedelta(0) +ONE_H = timedelta(hours=1) + + +def setUpModule(): + global TEMP_DIR + global ZONEINFO_DATA + global ZONEINFO_DATA_V1 + + TEMP_DIR = pathlib.Path(tempfile.mkdtemp(prefix="zoneinfo")) + ZONEINFO_DATA = ZoneInfoData(ZONEINFO_JSON, TEMP_DIR / "v2") + ZONEINFO_DATA_V1 = ZoneInfoData(ZONEINFO_JSON, TEMP_DIR / "v1", v1=True) + + +def tearDownModule(): + shutil.rmtree(TEMP_DIR) + + +class TzPathUserMixin: + """ + Adds a setUp() and tearDown() to make TZPATH manipulations thread-safe. 
+ + Any tests that require manipulation of the TZPATH global are necessarily + thread unsafe, so we will acquire a lock and reset the TZPATH variable + to the default state before each test and release the lock after the test + is through. + """ + + @property + def tzpath(self): # pragma: nocover + return None + + def setUp(self): + with contextlib.ExitStack() as stack: + stack.enter_context( + self.tzpath_context(self.tzpath, lock=TZPATH_TEST_LOCK) + ) + self.addCleanup(stack.pop_all().close) + + super().setUp() + + +class DatetimeSubclassMixin: + """ + Replaces all ZoneTransition transition dates with a datetime subclass. + """ + + class DatetimeSubclass(datetime): + @classmethod + def from_datetime(cls, dt): + return cls( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + dt.second, + dt.microsecond, + tzinfo=dt.tzinfo, + fold=dt.fold, + ) + + def load_transition_examples(self, key): + transition_examples = super().load_transition_examples(key) + for zt in transition_examples: + dt = zt.transition + new_dt = self.DatetimeSubclass.from_datetime(dt) + new_zt = dataclasses.replace(zt, transition=new_dt) + yield new_zt + + +class ZoneInfoTest(TzPathUserMixin, ZoneInfoTestBase): + module = py_zoneinfo + class_name = "ZoneInfo" + + def setUp(self): + super().setUp() + + # This is necessary because various subclasses pull from different + # data sources (e.g. tzdata, V1 files, etc). 
+ self.klass.clear_cache() + + @property + def zoneinfo_data(self): + return ZONEINFO_DATA + + @property + def tzpath(self): + return [self.zoneinfo_data.tzpath] + + def zone_from_key(self, key): + return self.klass(key) + + def zones(self): + return ZoneDumpData.transition_keys() + + def fixed_offset_zones(self): + return ZoneDumpData.fixed_offset_zones() + + def load_transition_examples(self, key): + return ZoneDumpData.load_transition_examples(key) + + def test_str(self): + # Zones constructed with a key must have str(zone) == key + for key in self.zones(): + with self.subTest(key): + zi = self.zone_from_key(key) + + self.assertEqual(str(zi), key) + + # Zones with no key constructed should have str(zone) == repr(zone) + file_key = self.zoneinfo_data.keys[0] + file_path = self.zoneinfo_data.path_from_key(file_key) + + with open(file_path, "rb") as f: + with self.subTest(test_name="Repr test", path=file_path): + zi_ff = self.klass.from_file(f) + self.assertEqual(str(zi_ff), repr(zi_ff)) + + def test_repr(self): + # The repr is not guaranteed, but I think we can insist that it at + # least contain the name of the class. 
+ key = next(iter(self.zones())) + + zi = self.klass(key) + class_name = self.class_name + with self.subTest(name="from key"): + self.assertRegex(repr(zi), class_name) + + file_key = self.zoneinfo_data.keys[0] + file_path = self.zoneinfo_data.path_from_key(file_key) + with open(file_path, "rb") as f: + zi_ff = self.klass.from_file(f, key=file_key) + + with self.subTest(name="from file with key"): + self.assertRegex(repr(zi_ff), class_name) + + with open(file_path, "rb") as f: + zi_ff_nk = self.klass.from_file(f) + + with self.subTest(name="from file without key"): + self.assertRegex(repr(zi_ff_nk), class_name) + + def test_key_attribute(self): + key = next(iter(self.zones())) + + def from_file_nokey(key): + with open(self.zoneinfo_data.path_from_key(key), "rb") as f: + return self.klass.from_file(f) + + constructors = ( + ("Primary constructor", self.klass, key), + ("no_cache", self.klass.no_cache, key), + ("from_file", from_file_nokey, None), + ) + + for msg, constructor, expected in constructors: + zi = constructor(key) + + # Ensure that the key attribute is set to the input to ``key`` + with self.subTest(msg): + self.assertEqual(zi.key, expected) + + # Ensure that the key attribute is read-only + with self.subTest(f"{msg}: readonly"): + with self.assertRaises(AttributeError): + zi.key = "Some/Value" + + def test_bad_keys(self): + bad_keys = [ + "Eurasia/Badzone", # Plausible but does not exist + "BZQ", + "America.Los_Angeles", + "??", # Non-ascii + "America/New\ud800York", # Contains surrogate character + ] + + for bad_key in bad_keys: + with self.assertRaises(self.module.ZoneInfoNotFoundError): + self.klass(bad_key) + + def test_bad_keys_paths(self): + bad_keys = [ + "/America/Los_Angeles", # Absolute path + "America/Los_Angeles/", # Trailing slash - not normalized + "../zoneinfo/America/Los_Angeles", # Traverses above TZPATH + "America/../America/Los_Angeles", # Not normalized + "America/./Los_Angeles", + ] + + for bad_key in bad_keys: + with 
self.assertRaises(ValueError): + self.klass(bad_key) + + def test_bad_zones(self): + bad_zones = [ + b"", # Empty file + b"AAAA3" + b" " * 15, # Bad magic + ] + + for bad_zone in bad_zones: + fobj = io.BytesIO(bad_zone) + with self.assertRaises(ValueError): + self.klass.from_file(fobj) + + def test_fromutc_errors(self): + key = next(iter(self.zones())) + zone = self.zone_from_key(key) + + bad_values = [ + (datetime(2019, 1, 1, tzinfo=timezone.utc), ValueError), + (datetime(2019, 1, 1), ValueError), + (date(2019, 1, 1), TypeError), + (time(0), TypeError), + (0, TypeError), + ("2019-01-01", TypeError), + ] + + for val, exc_type in bad_values: + with self.subTest(val=val): + with self.assertRaises(exc_type): + zone.fromutc(val) + + def test_utc(self): + zi = self.klass("UTC") + dt = datetime(2020, 1, 1, tzinfo=zi) + + self.assertEqual(dt.utcoffset(), ZERO) + self.assertEqual(dt.dst(), ZERO) + self.assertEqual(dt.tzname(), "UTC") + + def test_unambiguous(self): + test_cases = [] + for key in self.zones(): + for zone_transition in self.load_transition_examples(key): + test_cases.append( + ( + key, + zone_transition.transition - timedelta(days=2), + zone_transition.offset_before, + ) + ) + + test_cases.append( + ( + key, + zone_transition.transition + timedelta(days=2), + zone_transition.offset_after, + ) + ) + + for key, dt, offset in test_cases: + with self.subTest(key=key, dt=dt, offset=offset): + tzi = self.zone_from_key(key) + dt = dt.replace(tzinfo=tzi) + + self.assertEqual(dt.tzname(), offset.tzname, dt) + self.assertEqual(dt.utcoffset(), offset.utcoffset, dt) + self.assertEqual(dt.dst(), offset.dst, dt) + + def test_folds_and_gaps(self): + test_cases = [] + for key in self.zones(): + tests = {"folds": [], "gaps": []} + for zt in self.load_transition_examples(key): + if zt.fold: + test_group = tests["folds"] + elif zt.gap: + test_group = tests["gaps"] + else: + # Assign a random variable here to disable the peephole + # optimizer so that coverage can see this 
line. + # See bpo-2506 for more information. + no_peephole_opt = None + continue + + # Cases are of the form key, dt, fold, offset + dt = zt.anomaly_start - timedelta(seconds=1) + test_group.append((dt, 0, zt.offset_before)) + test_group.append((dt, 1, zt.offset_before)) + + dt = zt.anomaly_start + test_group.append((dt, 0, zt.offset_before)) + test_group.append((dt, 1, zt.offset_after)) + + dt = zt.anomaly_start + timedelta(seconds=1) + test_group.append((dt, 0, zt.offset_before)) + test_group.append((dt, 1, zt.offset_after)) + + dt = zt.anomaly_end - timedelta(seconds=1) + test_group.append((dt, 0, zt.offset_before)) + test_group.append((dt, 1, zt.offset_after)) + + dt = zt.anomaly_end + test_group.append((dt, 0, zt.offset_after)) + test_group.append((dt, 1, zt.offset_after)) + + dt = zt.anomaly_end + timedelta(seconds=1) + test_group.append((dt, 0, zt.offset_after)) + test_group.append((dt, 1, zt.offset_after)) + + for grp, test_group in tests.items(): + test_cases.append(((key, grp), test_group)) + + for (key, grp), tests in test_cases: + with self.subTest(key=key, grp=grp): + tzi = self.zone_from_key(key) + + for dt, fold, offset in tests: + dt = dt.replace(fold=fold, tzinfo=tzi) + + self.assertEqual(dt.tzname(), offset.tzname, dt) + self.assertEqual(dt.utcoffset(), offset.utcoffset, dt) + self.assertEqual(dt.dst(), offset.dst, dt) + + def test_folds_from_utc(self): + tests = [] + for key in self.zones(): + zi = self.zone_from_key(key) + with self.subTest(key=key): + for zt in self.load_transition_examples(key): + if not zt.fold: + continue + + dt_utc = zt.transition_utc + dt_before_utc = dt_utc - timedelta(seconds=1) + dt_after_utc = dt_utc + timedelta(seconds=1) + + dt_before = dt_before_utc.astimezone(zi) + self.assertEqual(dt_before.fold, 0, (dt_before, dt_utc)) + + dt_after = dt_after_utc.astimezone(zi) + self.assertEqual(dt_after.fold, 1, (dt_after, dt_utc)) + + def test_time_variable_offset(self): + # self.zones() only ever returns variable-offset zones 
+ for key in self.zones(): + zi = self.zone_from_key(key) + t = time(11, 15, 1, 34471, tzinfo=zi) + + with self.subTest(key=key): + self.assertIs(t.tzname(), None) + self.assertIs(t.utcoffset(), None) + self.assertIs(t.dst(), None) + + def test_time_fixed_offset(self): + for key, offset in self.fixed_offset_zones(): + zi = self.zone_from_key(key) + + t = time(11, 15, 1, 34471, tzinfo=zi) + + with self.subTest(key=key): + self.assertEqual(t.tzname(), offset.tzname) + self.assertEqual(t.utcoffset(), offset.utcoffset) + self.assertEqual(t.dst(), offset.dst) + + +class CZoneInfoTest(ZoneInfoTest): + module = c_zoneinfo + + def test_fold_mutate(self): + """Test that fold isn't mutated when no change is necessary. + + The underlying C API is capable of mutating datetime objects, and + may rely on the fact that addition of a datetime object returns a + new datetime; this test ensures that the input datetime to fromutc + is not mutated. + """ + + def to_subclass(dt): + class SameAddSubclass(type(dt)): + def __add__(self, other): + if other == timedelta(0): + return self + + return super().__add__(other) # pragma: nocover + + return SameAddSubclass( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + dt.second, + dt.microsecond, + fold=dt.fold, + tzinfo=dt.tzinfo, + ) + + subclass = [False, True] + + key = "Europe/London" + zi = self.zone_from_key(key) + for zt in self.load_transition_examples(key): + if zt.fold and zt.offset_after.utcoffset == ZERO: + example = zt.transition_utc.replace(tzinfo=zi) + break + + for subclass in [False, True]: + if subclass: + dt = to_subclass(example) + else: + dt = example + + with self.subTest(subclass=subclass): + dt_fromutc = zi.fromutc(dt) + + self.assertEqual(dt_fromutc.fold, 1) + self.assertEqual(dt.fold, 0) + + +class ZoneInfoDatetimeSubclassTest(DatetimeSubclassMixin, ZoneInfoTest): + pass + + +class CZoneInfoDatetimeSubclassTest(DatetimeSubclassMixin, CZoneInfoTest): + pass + + +class ZoneInfoTestSubclass(ZoneInfoTest): + 
@classmethod + def setUpClass(cls): + super().setUpClass() + + class ZISubclass(cls.klass): + pass + + cls.class_name = "ZISubclass" + cls.parent_klass = cls.klass + cls.klass = ZISubclass + + def test_subclass_own_cache(self): + base_obj = self.parent_klass("Europe/London") + sub_obj = self.klass("Europe/London") + + self.assertIsNot(base_obj, sub_obj) + self.assertIsInstance(base_obj, self.parent_klass) + self.assertIsInstance(sub_obj, self.klass) + + +class CZoneInfoTestSubclass(ZoneInfoTest): + module = c_zoneinfo + + +class ZoneInfoV1Test(ZoneInfoTest): + @property + def zoneinfo_data(self): + return ZONEINFO_DATA_V1 + + def load_transition_examples(self, key): + # We will discard zdump examples outside the range epoch +/- 2**31, + # because they are not well-supported in Version 1 files. + epoch = datetime(1970, 1, 1) + max_offset_32 = timedelta(seconds=2 ** 31) + min_dt = epoch - max_offset_32 + max_dt = epoch + max_offset_32 + + for zt in ZoneDumpData.load_transition_examples(key): + if min_dt <= zt.transition <= max_dt: + yield zt + + +class CZoneInfoV1Test(ZoneInfoV1Test): + module = c_zoneinfo + + + at unittest.skipIf( + not HAS_TZDATA_PKG, "Skipping tzdata-specific tests: tzdata not installed" +) +class TZDataTests(ZoneInfoTest): + """ + Runs all the ZoneInfoTest tests, but against the tzdata package + + NOTE: The ZoneDumpData has frozen test data, but tzdata will update, so + some of the tests (particularly those related to the far future) may break + in the event that the time zone policies in the relevant time zones change. 
+ """ + + @property + def tzpath(self): + return [] + + def zone_from_key(self, key): + return self.klass(key=key) + + + at unittest.skipIf( + not HAS_TZDATA_PKG, "Skipping tzdata-specific tests: tzdata not installed" +) +class CTZDataTests(TZDataTests): + module = c_zoneinfo + + +class WeirdZoneTest(ZoneInfoTestBase): + module = py_zoneinfo + + def test_one_transition(self): + LMT = ZoneOffset("LMT", -timedelta(hours=6, minutes=31, seconds=2)) + STD = ZoneOffset("STD", -timedelta(hours=6)) + + transitions = [ + ZoneTransition(datetime(1883, 6, 9, 14), LMT, STD), + ] + + after = "STD6" + + zf = self.construct_zone(transitions, after) + zi = self.klass.from_file(zf) + + dt0 = datetime(1883, 6, 9, 1, tzinfo=zi) + dt1 = datetime(1883, 6, 10, 1, tzinfo=zi) + + for dt, offset in [(dt0, LMT), (dt1, STD)]: + with self.subTest(name="local", dt=dt): + self.assertEqual(dt.tzname(), offset.tzname) + self.assertEqual(dt.utcoffset(), offset.utcoffset) + self.assertEqual(dt.dst(), offset.dst) + + dts = [ + ( + datetime(1883, 6, 9, 1, tzinfo=zi), + datetime(1883, 6, 9, 7, 31, 2, tzinfo=timezone.utc), + ), + ( + datetime(2010, 4, 1, 12, tzinfo=zi), + datetime(2010, 4, 1, 18, tzinfo=timezone.utc), + ), + ] + + for dt_local, dt_utc in dts: + with self.subTest(name="fromutc", dt=dt_local): + dt_actual = dt_utc.astimezone(zi) + self.assertEqual(dt_actual, dt_local) + + dt_utc_actual = dt_local.astimezone(timezone.utc) + self.assertEqual(dt_utc_actual, dt_utc) + + def test_one_zone_dst(self): + DST = ZoneOffset("DST", ONE_H, ONE_H) + transitions = [ + ZoneTransition(datetime(1970, 1, 1), DST, DST), + ] + + after = "STD0DST-1,0/0,J365/25" + + zf = self.construct_zone(transitions, after) + zi = self.klass.from_file(zf) + + dts = [ + datetime(1900, 3, 1), + datetime(1965, 9, 12), + datetime(1970, 1, 1), + datetime(2010, 11, 3), + datetime(2040, 1, 1), + ] + + for dt in dts: + dt = dt.replace(tzinfo=zi) + with self.subTest(dt=dt): + self.assertEqual(dt.tzname(), DST.tzname) + 
self.assertEqual(dt.utcoffset(), DST.utcoffset) + self.assertEqual(dt.dst(), DST.dst) + + def test_no_tz_str(self): + STD = ZoneOffset("STD", ONE_H, ZERO) + DST = ZoneOffset("DST", 2 * ONE_H, ONE_H) + + transitions = [] + for year in range(1996, 2000): + transitions.append( + ZoneTransition(datetime(year, 3, 1, 2), STD, DST) + ) + transitions.append( + ZoneTransition(datetime(year, 11, 1, 2), DST, STD) + ) + + after = "" + + zf = self.construct_zone(transitions, after) + + # According to RFC 8536, local times after the last transition time + # with an empty TZ string are unspecified. We will go with "hold the + # last transition", but the most we should promise is "doesn't crash." + zi = self.klass.from_file(zf) + + cases = [ + (datetime(1995, 1, 1), STD), + (datetime(1996, 4, 1), DST), + (datetime(1996, 11, 2), STD), + (datetime(2001, 1, 1), STD), + ] + + for dt, offset in cases: + dt = dt.replace(tzinfo=zi) + with self.subTest(dt=dt): + self.assertEqual(dt.tzname(), offset.tzname) + self.assertEqual(dt.utcoffset(), offset.utcoffset) + self.assertEqual(dt.dst(), offset.dst) + + # Test that offsets return None when using a datetime.time + t = time(0, tzinfo=zi) + with self.subTest("Testing datetime.time"): + self.assertIs(t.tzname(), None) + self.assertIs(t.utcoffset(), None) + self.assertIs(t.dst(), None) + + def test_tz_before_only(self): + # From RFC 8536 Section 3.2: + # + # If there are no transitions, local time for all timestamps is + # specified by the TZ string in the footer if present and nonempty; + # otherwise, it is specified by time type 0. + + offsets = [ + ZoneOffset("STD", ZERO, ZERO), + ZoneOffset("DST", ONE_H, ONE_H), + ] + + for offset in offsets: + # Phantom transition to set time type 0. 
+ transitions = [ + ZoneTransition(None, offset, offset), + ] + + after = "" + + zf = self.construct_zone(transitions, after) + zi = self.klass.from_file(zf) + + dts = [ + datetime(1900, 1, 1), + datetime(1970, 1, 1), + datetime(2000, 1, 1), + ] + + for dt in dts: + dt = dt.replace(tzinfo=zi) + with self.subTest(offset=offset, dt=dt): + self.assertEqual(dt.tzname(), offset.tzname) + self.assertEqual(dt.utcoffset(), offset.utcoffset) + self.assertEqual(dt.dst(), offset.dst) + + def test_empty_zone(self): + zf = self.construct_zone([], "") + + with self.assertRaises(ValueError): + self.klass.from_file(zf) + + def test_zone_very_large_timestamp(self): + """Test when a transition is in the far past or future. + + Particularly, this is a concern if something: + + 1. Attempts to call ``datetime.timestamp`` for a datetime outside + of ``[datetime.min, datetime.max]``. + 2. Attempts to construct a timedelta outside of + ``[timedelta.min, timedelta.max]``. + + This actually occurs "in the wild", as some time zones on Ubuntu (at + least as of 2020) have an initial transition added at ``-2**58``. 
+ """ + + LMT = ZoneOffset("LMT", timedelta(seconds=-968)) + GMT = ZoneOffset("GMT", ZERO) + + transitions = [ + (-(1 << 62), LMT, LMT), + ZoneTransition(datetime(1912, 1, 1), LMT, GMT), + ((1 << 62), GMT, GMT), + ] + + after = "GMT0" + + zf = self.construct_zone(transitions, after) + zi = self.klass.from_file(zf, key="Africa/Abidjan") + + offset_cases = [ + (datetime.min, LMT), + (datetime.max, GMT), + (datetime(1911, 12, 31), LMT), + (datetime(1912, 1, 2), GMT), + ] + + for dt_naive, offset in offset_cases: + dt = dt_naive.replace(tzinfo=zi) + with self.subTest(name="offset", dt=dt, offset=offset): + self.assertEqual(dt.tzname(), offset.tzname) + self.assertEqual(dt.utcoffset(), offset.utcoffset) + self.assertEqual(dt.dst(), offset.dst) + + utc_cases = [ + (datetime.min, datetime.min + timedelta(seconds=968)), + (datetime(1898, 12, 31, 23, 43, 52), datetime(1899, 1, 1)), + ( + datetime(1911, 12, 31, 23, 59, 59, 999999), + datetime(1912, 1, 1, 0, 16, 7, 999999), + ), + (datetime(1912, 1, 1, 0, 16, 8), datetime(1912, 1, 1, 0, 16, 8)), + (datetime(1970, 1, 1), datetime(1970, 1, 1)), + (datetime.max, datetime.max), + ] + + for naive_dt, naive_dt_utc in utc_cases: + dt = naive_dt.replace(tzinfo=zi) + dt_utc = naive_dt_utc.replace(tzinfo=timezone.utc) + + self.assertEqual(dt_utc.astimezone(zi), dt) + self.assertEqual(dt, dt_utc) + + def test_fixed_offset_phantom_transition(self): + UTC = ZoneOffset("UTC", ZERO, ZERO) + + transitions = [ZoneTransition(datetime(1970, 1, 1), UTC, UTC)] + + after = "UTC0" + zf = self.construct_zone(transitions, after) + zi = self.klass.from_file(zf, key="UTC") + + dt = datetime(2020, 1, 1, tzinfo=zi) + with self.subTest("datetime.datetime"): + self.assertEqual(dt.tzname(), UTC.tzname) + self.assertEqual(dt.utcoffset(), UTC.utcoffset) + self.assertEqual(dt.dst(), UTC.dst) + + t = time(0, tzinfo=zi) + with self.subTest("datetime.time"): + self.assertEqual(t.tzname(), UTC.tzname) + self.assertEqual(t.utcoffset(), UTC.utcoffset) + 
self.assertEqual(t.dst(), UTC.dst) + + def construct_zone(self, transitions, after=None, version=3): + # These are not used for anything, so we're not going to include + # them for now. + isutc = [] + isstd = [] + leap_seconds = [] + + offset_lists = [[], []] + trans_times_lists = [[], []] + trans_idx_lists = [[], []] + + v1_range = (-(2 ** 31), 2 ** 31) + v2_range = (-(2 ** 63), 2 ** 63) + ranges = [v1_range, v2_range] + + def zt_as_tuple(zt): + # zt may be a tuple (timestamp, offset_before, offset_after) or + # a ZoneTransition object ? this is to allow the timestamp to be + # values that are outside the valid range for datetimes but still + # valid 64-bit timestamps. + if isinstance(zt, tuple): + return zt + + if zt.transition: + trans_time = int(zt.transition_utc.timestamp()) + else: + trans_time = None + + return (trans_time, zt.offset_before, zt.offset_after) + + transitions = sorted(map(zt_as_tuple, transitions), key=lambda x: x[0]) + + for zt in transitions: + trans_time, offset_before, offset_after = zt + + for v, (dt_min, dt_max) in enumerate(ranges): + offsets = offset_lists[v] + trans_times = trans_times_lists[v] + trans_idx = trans_idx_lists[v] + + if trans_time is not None and not ( + dt_min <= trans_time <= dt_max + ): + continue + + if offset_before not in offsets: + offsets.append(offset_before) + + if offset_after not in offsets: + offsets.append(offset_after) + + if trans_time is not None: + trans_times.append(trans_time) + trans_idx.append(offsets.index(offset_after)) + + isutcnt = len(isutc) + isstdcnt = len(isstd) + leapcnt = len(leap_seconds) + + zonefile = io.BytesIO() + + time_types = ("l", "q") + for v in range(min((version, 2))): + offsets = offset_lists[v] + trans_times = trans_times_lists[v] + trans_idx = trans_idx_lists[v] + time_type = time_types[v] + + # Translate the offsets into something closer to the C values + abbrstr = bytearray() + ttinfos = [] + + for offset in offsets: + utcoff = int(offset.utcoffset.total_seconds()) + isdst 
= bool(offset.dst) + abbrind = len(abbrstr) + + ttinfos.append((utcoff, isdst, abbrind)) + abbrstr += offset.tzname.encode("ascii") + b"\x00" + abbrstr = bytes(abbrstr) + + typecnt = len(offsets) + timecnt = len(trans_times) + charcnt = len(abbrstr) + + # Write the header + zonefile.write(b"TZif") + zonefile.write(b"%d" % version) + zonefile.write(b" " * 15) + zonefile.write( + struct.pack( + ">6l", isutcnt, isstdcnt, leapcnt, timecnt, typecnt, charcnt + ) + ) + + # Now the transition data + zonefile.write(struct.pack(f">{timecnt}{time_type}", *trans_times)) + zonefile.write(struct.pack(f">{timecnt}B", *trans_idx)) + + for ttinfo in ttinfos: + zonefile.write(struct.pack(">lbb", *ttinfo)) + + zonefile.write(bytes(abbrstr)) + + # Now the metadata and leap seconds + zonefile.write(struct.pack(f"{isutcnt}b", *isutc)) + zonefile.write(struct.pack(f"{isstdcnt}b", *isstd)) + zonefile.write(struct.pack(f">{leapcnt}l", *leap_seconds)) + + # Finally we write the TZ string if we're writing a Version 2+ file + if v > 0: + zonefile.write(b"\x0A") + zonefile.write(after.encode("ascii")) + zonefile.write(b"\x0A") + + zonefile.seek(0) + return zonefile + + +class CWeirdZoneTest(WeirdZoneTest): + module = c_zoneinfo + + +class TZStrTest(ZoneInfoTestBase): + module = py_zoneinfo + + NORMAL = 0 + FOLD = 1 + GAP = 2 + + @classmethod + def setUpClass(cls): + super().setUpClass() + + cls._populate_test_cases() + cls.populate_tzstr_header() + + @classmethod + def populate_tzstr_header(cls): + out = bytearray() + # The TZif format always starts with a Version 1 file followed by + # the Version 2+ file. In this case, we have no transitions, just + # the tzstr in the footer, so up to the footer, the files are + # identical and we can just write the same file twice in a row. 
+ for i in range(2): + out += b"TZif" # Magic value + out += b"3" # Version + out += b" " * 15 # Reserved + + # We will not write any of the manual transition parts + out += struct.pack(">6l", 0, 0, 0, 0, 0, 0) + + cls._tzif_header = bytes(out) + + def zone_from_tzstr(self, tzstr): + """Creates a zoneinfo file following a POSIX rule.""" + zonefile = io.BytesIO(self._tzif_header) + zonefile.seek(0, 2) + + # Write the footer + zonefile.write(b"\x0A") + zonefile.write(tzstr.encode("ascii")) + zonefile.write(b"\x0A") + + zonefile.seek(0) + + return self.klass.from_file(zonefile, key=tzstr) + + def test_tzstr_localized(self): + i = 0 + for tzstr, cases in self.test_cases.items(): + with self.subTest(tzstr=tzstr): + zi = self.zone_from_tzstr(tzstr) + + for dt_naive, offset, _ in cases: + dt = dt_naive.replace(tzinfo=zi) + + with self.subTest(tzstr=tzstr, dt=dt, offset=offset): + self.assertEqual(dt.tzname(), offset.tzname) + self.assertEqual(dt.utcoffset(), offset.utcoffset) + self.assertEqual(dt.dst(), offset.dst) + + def test_tzstr_from_utc(self): + for tzstr, cases in self.test_cases.items(): + with self.subTest(tzstr=tzstr): + zi = self.zone_from_tzstr(tzstr) + + for dt_naive, offset, dt_type in cases: + if dt_type == self.GAP: + continue # Cannot create a gap from UTC + + dt_utc = (dt_naive - offset.utcoffset).replace( + tzinfo=timezone.utc + ) + + # Check that we can go UTC -> Our zone + dt_act = dt_utc.astimezone(zi) + dt_exp = dt_naive.replace(tzinfo=zi) + + self.assertEqual(dt_act, dt_exp) + + if dt_type == self.FOLD: + self.assertEqual(dt_act.fold, dt_naive.fold, dt_naive) + else: + self.assertEqual(dt_act.fold, 0) + + # Now check that we can go our zone -> UTC + dt_act = dt_exp.astimezone(timezone.utc) + + self.assertEqual(dt_act, dt_utc) + + def test_invalid_tzstr(self): + invalid_tzstrs = [ + "PST8PDT", # DST but no transition specified + "+11", # Unquoted alphanumeric + "GMT,M3.2.0/2,M11.1.0/3", # Transition rule but no DST + "GMT0+11,M3.2.0/2,M11.1.0/3", # 
Unquoted alphanumeric in DST + "PST8PDT,M3.2.0/2", # Only one transition rule + # Invalid offsets + "STD+25", + "STD-25", + "STD+374", + "STD+374DST,M3.2.0/2,M11.1.0/3", + "STD+23DST+25,M3.2.0/2,M11.1.0/3", + "STD-23DST-25,M3.2.0/2,M11.1.0/3", + # Completely invalid dates + "AAA4BBB,M1443339,M11.1.0/3", + "AAA4BBB,M3.2.0/2,0349309483959c", + # Invalid months + "AAA4BBB,M13.1.1/2,M1.1.1/2", + "AAA4BBB,M1.1.1/2,M13.1.1/2", + "AAA4BBB,M0.1.1/2,M1.1.1/2", + "AAA4BBB,M1.1.1/2,M0.1.1/2", + # Invalid weeks + "AAA4BBB,M1.6.1/2,M1.1.1/2", + "AAA4BBB,M1.1.1/2,M1.6.1/2", + # Invalid weekday + "AAA4BBB,M1.1.7/2,M2.1.1/2", + "AAA4BBB,M1.1.1/2,M2.1.7/2", + # Invalid numeric offset + "AAA4BBB,-1/2,20/2", + "AAA4BBB,1/2,-1/2", + "AAA4BBB,367,20/2", + "AAA4BBB,1/2,367/2", + # Invalid julian offset + "AAA4BBB,J0/2,J20/2", + "AAA4BBB,J20/2,J366/2", + ] + + for invalid_tzstr in invalid_tzstrs: + with self.subTest(tzstr=invalid_tzstr): + # Not necessarily a guaranteed property, but we should show + # the problematic TZ string if that's the cause of failure. + tzstr_regex = re.escape(invalid_tzstr) + with self.assertRaisesRegex(ValueError, tzstr_regex): + self.zone_from_tzstr(invalid_tzstr) + + @classmethod + def _populate_test_cases(cls): + # This method uses a somewhat unusual style in that it populates the + # test cases for each tzstr by using a decorator to automatically call + # a function that mutates the current dictionary of test cases. + # + # The population of the test cases is done in individual functions to + # give each set of test cases its own namespace in which to define + # its offsets (this way we don't have to worry about variable reuse + # causing problems if someone makes a typo). + # + # The decorator for calling is used to make it more obvious that each + # function is actually called (if it's not decorated, it's not called). + def call(f): + """Decorator to call the addition methods. 
+ + This will call a function which adds at least one new entry into + the `cases` dictionary. The decorator will also assert that + something was added to the dictionary. + """ + prev_len = len(cases) + f() + assert len(cases) > prev_len, "Function did not add a test case!" + + NORMAL = cls.NORMAL + FOLD = cls.FOLD + GAP = cls.GAP + + cases = {} + + @call + def _add(): + # Transition to EDT on the 2nd Sunday in March at 4 AM, and + # transition back on the first Sunday in November at 3AM + tzstr = "EST5EDT,M3.2.0/4:00,M11.1.0/3:00" + + EST = ZoneOffset("EST", timedelta(hours=-5), ZERO) + EDT = ZoneOffset("EDT", timedelta(hours=-4), ONE_H) + + cases[tzstr] = ( + (datetime(2019, 3, 9), EST, NORMAL), + (datetime(2019, 3, 10, 3, 59), EST, NORMAL), + (datetime(2019, 3, 10, 4, 0, fold=0), EST, GAP), + (datetime(2019, 3, 10, 4, 0, fold=1), EDT, GAP), + (datetime(2019, 3, 10, 4, 1, fold=0), EST, GAP), + (datetime(2019, 3, 10, 4, 1, fold=1), EDT, GAP), + (datetime(2019, 11, 2), EDT, NORMAL), + (datetime(2019, 11, 3, 1, 59, fold=1), EDT, NORMAL), + (datetime(2019, 11, 3, 2, 0, fold=0), EDT, FOLD), + (datetime(2019, 11, 3, 2, 0, fold=1), EST, FOLD), + (datetime(2020, 3, 8, 3, 59), EST, NORMAL), + (datetime(2020, 3, 8, 4, 0, fold=0), EST, GAP), + (datetime(2020, 3, 8, 4, 0, fold=1), EDT, GAP), + (datetime(2020, 11, 1, 1, 59, fold=1), EDT, NORMAL), + (datetime(2020, 11, 1, 2, 0, fold=0), EDT, FOLD), + (datetime(2020, 11, 1, 2, 0, fold=1), EST, FOLD), + ) + + @call + def _add(): + # Transition to BST happens on the last Sunday in March at 1 AM GMT + # and the transition back happens the last Sunday in October at 2AM BST + tzstr = "GMT0BST-1,M3.5.0/1:00,M10.5.0/2:00" + + GMT = ZoneOffset("GMT", ZERO, ZERO) + BST = ZoneOffset("BST", ONE_H, ONE_H) + + cases[tzstr] = ( + (datetime(2019, 3, 30), GMT, NORMAL), + (datetime(2019, 3, 31, 0, 59), GMT, NORMAL), + (datetime(2019, 3, 31, 2, 0), BST, NORMAL), + (datetime(2019, 10, 26), BST, NORMAL), + (datetime(2019, 10, 27, 0, 59, fold=1), 
BST, NORMAL), + (datetime(2019, 10, 27, 1, 0, fold=0), BST, GAP), + (datetime(2019, 10, 27, 2, 0, fold=1), GMT, GAP), + (datetime(2020, 3, 29, 0, 59), GMT, NORMAL), + (datetime(2020, 3, 29, 2, 0), BST, NORMAL), + (datetime(2020, 10, 25, 0, 59, fold=1), BST, NORMAL), + (datetime(2020, 10, 25, 1, 0, fold=0), BST, FOLD), + (datetime(2020, 10, 25, 2, 0, fold=1), GMT, NORMAL), + ) + + @call + def _add(): + # Austrialian time zone - DST start is chronologically first + tzstr = "AEST-10AEDT,M10.1.0/2,M4.1.0/3" + + AEST = ZoneOffset("AEST", timedelta(hours=10), ZERO) + AEDT = ZoneOffset("AEDT", timedelta(hours=11), ONE_H) + + cases[tzstr] = ( + (datetime(2019, 4, 6), AEDT, NORMAL), + (datetime(2019, 4, 7, 1, 59), AEDT, NORMAL), + (datetime(2019, 4, 7, 1, 59, fold=1), AEDT, NORMAL), + (datetime(2019, 4, 7, 2, 0, fold=0), AEDT, FOLD), + (datetime(2019, 4, 7, 2, 1, fold=0), AEDT, FOLD), + (datetime(2019, 4, 7, 2, 0, fold=1), AEST, FOLD), + (datetime(2019, 4, 7, 2, 1, fold=1), AEST, FOLD), + (datetime(2019, 4, 7, 3, 0, fold=0), AEST, NORMAL), + (datetime(2019, 4, 7, 3, 0, fold=1), AEST, NORMAL), + (datetime(2019, 10, 5, 0), AEST, NORMAL), + (datetime(2019, 10, 6, 1, 59), AEST, NORMAL), + (datetime(2019, 10, 6, 2, 0, fold=0), AEST, GAP), + (datetime(2019, 10, 6, 2, 0, fold=1), AEDT, GAP), + (datetime(2019, 10, 6, 3, 0), AEDT, NORMAL), + ) + + @call + def _add(): + # Irish time zone - negative DST + tzstr = "IST-1GMT0,M10.5.0,M3.5.0/1" + + GMT = ZoneOffset("GMT", ZERO, -ONE_H) + IST = ZoneOffset("IST", ONE_H, ZERO) + + cases[tzstr] = ( + (datetime(2019, 3, 30), GMT, NORMAL), + (datetime(2019, 3, 31, 0, 59), GMT, NORMAL), + (datetime(2019, 3, 31, 2, 0), IST, NORMAL), + (datetime(2019, 10, 26), IST, NORMAL), + (datetime(2019, 10, 27, 0, 59, fold=1), IST, NORMAL), + (datetime(2019, 10, 27, 1, 0, fold=0), IST, FOLD), + (datetime(2019, 10, 27, 1, 0, fold=1), GMT, FOLD), + (datetime(2019, 10, 27, 2, 0, fold=1), GMT, NORMAL), + (datetime(2020, 3, 29, 0, 59), GMT, NORMAL), + 
(datetime(2020, 3, 29, 2, 0), IST, NORMAL), + (datetime(2020, 10, 25, 0, 59, fold=1), IST, NORMAL), + (datetime(2020, 10, 25, 1, 0, fold=0), IST, FOLD), + (datetime(2020, 10, 25, 2, 0, fold=1), GMT, NORMAL), + ) + + @call + def _add(): + # Pacific/Kosrae: Fixed offset zone with a quoted numerical tzname + tzstr = "<+11>-11" + + cases[tzstr] = ( + ( + datetime(2020, 1, 1), + ZoneOffset("+11", timedelta(hours=11)), + NORMAL, + ), + ) + + @call + def _add(): + # Quoted STD and DST, transitions at 24:00 + tzstr = "<-04>4<-03>,M9.1.6/24,M4.1.6/24" + + M04 = ZoneOffset("-04", timedelta(hours=-4)) + M03 = ZoneOffset("-03", timedelta(hours=-3), ONE_H) + + cases[tzstr] = ( + (datetime(2020, 5, 1), M04, NORMAL), + (datetime(2020, 11, 1), M03, NORMAL), + ) + + @call + def _add(): + # Permanent daylight saving time is modeled with transitions at 0/0 + # and J365/25, as mentioned in RFC 8536 Section 3.3.1 + tzstr = "EST5EDT,0/0,J365/25" + + EDT = ZoneOffset("EDT", timedelta(hours=-4), ONE_H) + + cases[tzstr] = ( + (datetime(2019, 1, 1), EDT, NORMAL), + (datetime(2019, 6, 1), EDT, NORMAL), + (datetime(2019, 12, 31, 23, 59, 59, 999999), EDT, NORMAL), + (datetime(2020, 1, 1), EDT, NORMAL), + (datetime(2020, 3, 1), EDT, NORMAL), + (datetime(2020, 6, 1), EDT, NORMAL), + (datetime(2020, 12, 31, 23, 59, 59, 999999), EDT, NORMAL), + (datetime(2400, 1, 1), EDT, NORMAL), + (datetime(2400, 3, 1), EDT, NORMAL), + (datetime(2400, 12, 31, 23, 59, 59, 999999), EDT, NORMAL), + ) + + @call + def _add(): + # Transitions on March 1st and November 1st of each year + tzstr = "AAA3BBB,J60/12,J305/12" + + AAA = ZoneOffset("AAA", timedelta(hours=-3)) + BBB = ZoneOffset("BBB", timedelta(hours=-2), ONE_H) + + cases[tzstr] = ( + (datetime(2019, 1, 1), AAA, NORMAL), + (datetime(2019, 2, 28), AAA, NORMAL), + (datetime(2019, 3, 1, 11, 59), AAA, NORMAL), + (datetime(2019, 3, 1, 12, fold=0), AAA, GAP), + (datetime(2019, 3, 1, 12, fold=1), BBB, GAP), + (datetime(2019, 3, 1, 13), BBB, NORMAL), + (datetime(2019, 
11, 1, 10, 59), BBB, NORMAL), + (datetime(2019, 11, 1, 11, fold=0), BBB, FOLD), + (datetime(2019, 11, 1, 11, fold=1), AAA, FOLD), + (datetime(2019, 11, 1, 12), AAA, NORMAL), + (datetime(2019, 12, 31, 23, 59, 59, 999999), AAA, NORMAL), + (datetime(2020, 1, 1), AAA, NORMAL), + (datetime(2020, 2, 29), AAA, NORMAL), + (datetime(2020, 3, 1, 11, 59), AAA, NORMAL), + (datetime(2020, 3, 1, 12, fold=0), AAA, GAP), + (datetime(2020, 3, 1, 12, fold=1), BBB, GAP), + (datetime(2020, 3, 1, 13), BBB, NORMAL), + (datetime(2020, 11, 1, 10, 59), BBB, NORMAL), + (datetime(2020, 11, 1, 11, fold=0), BBB, FOLD), + (datetime(2020, 11, 1, 11, fold=1), AAA, FOLD), + (datetime(2020, 11, 1, 12), AAA, NORMAL), + (datetime(2020, 12, 31, 23, 59, 59, 999999), AAA, NORMAL), + ) + + @call + def _add(): + # Taken from America/Godthab, this rule has a transition on the + # Saturday before the last Sunday of March and October, at 22:00 + # and 23:00, respectively. This is encoded with negative start + # and end transition times. 
+ tzstr = "<-03>3<-02>,M3.5.0/-2,M10.5.0/-1" + + N03 = ZoneOffset("-03", timedelta(hours=-3)) + N02 = ZoneOffset("-02", timedelta(hours=-2), ONE_H) + + cases[tzstr] = ( + (datetime(2020, 3, 27), N03, NORMAL), + (datetime(2020, 3, 28, 21, 59, 59), N03, NORMAL), + (datetime(2020, 3, 28, 22, fold=0), N03, GAP), + (datetime(2020, 3, 28, 22, fold=1), N02, GAP), + (datetime(2020, 3, 28, 23), N02, NORMAL), + (datetime(2020, 10, 24, 21), N02, NORMAL), + (datetime(2020, 10, 24, 22, fold=0), N02, FOLD), + (datetime(2020, 10, 24, 22, fold=1), N03, FOLD), + (datetime(2020, 10, 24, 23), N03, NORMAL), + ) + + @call + def _add(): + # Transition times with minutes and seconds + tzstr = "AAA3BBB,M3.2.0/01:30,M11.1.0/02:15:45" + + AAA = ZoneOffset("AAA", timedelta(hours=-3)) + BBB = ZoneOffset("BBB", timedelta(hours=-2), ONE_H) + + cases[tzstr] = ( + (datetime(2012, 3, 11, 1, 0), AAA, NORMAL), + (datetime(2012, 3, 11, 1, 30, fold=0), AAA, GAP), + (datetime(2012, 3, 11, 1, 30, fold=1), BBB, GAP), + (datetime(2012, 3, 11, 2, 30), BBB, NORMAL), + (datetime(2012, 11, 4, 1, 15, 44, 999999), BBB, NORMAL), + (datetime(2012, 11, 4, 1, 15, 45, fold=0), BBB, FOLD), + (datetime(2012, 11, 4, 1, 15, 45, fold=1), AAA, FOLD), + (datetime(2012, 11, 4, 2, 15, 45), AAA, NORMAL), + ) + + cls.test_cases = cases + + +class CTZStrTest(TZStrTest): + module = c_zoneinfo + + +class ZoneInfoCacheTest(TzPathUserMixin, ZoneInfoTestBase): + module = py_zoneinfo + + def setUp(self): + self.klass.clear_cache() + super().setUp() + + @property + def zoneinfo_data(self): + return ZONEINFO_DATA + + @property + def tzpath(self): + return [self.zoneinfo_data.tzpath] + + def test_ephemeral_zones(self): + self.assertIs( + self.klass("America/Los_Angeles"), self.klass("America/Los_Angeles") + ) + + def test_strong_refs(self): + tz0 = self.klass("Australia/Sydney") + tz1 = self.klass("Australia/Sydney") + + self.assertIs(tz0, tz1) + + def test_no_cache(self): + + tz0 = self.klass("Europe/Lisbon") + tz1 = 
self.klass.no_cache("Europe/Lisbon") + + self.assertIsNot(tz0, tz1) + + def test_cache_reset_tzpath(self): + """Test that the cache persists when tzpath has been changed. + + The PEP specifies that as long as a reference exists to one zone + with a given key, the primary constructor must continue to return + the same object. + """ + zi0 = self.klass("America/Los_Angeles") + with self.tzpath_context([]): + zi1 = self.klass("America/Los_Angeles") + + self.assertIs(zi0, zi1) + + def test_clear_cache_explicit_none(self): + la0 = self.klass("America/Los_Angeles") + self.klass.clear_cache(only_keys=None) + la1 = self.klass("America/Los_Angeles") + + self.assertIsNot(la0, la1) + + def test_clear_cache_one_key(self): + """Tests that you can clear a single key from the cache.""" + la0 = self.klass("America/Los_Angeles") + dub0 = self.klass("Europe/Dublin") + + self.klass.clear_cache(only_keys=["America/Los_Angeles"]) + + la1 = self.klass("America/Los_Angeles") + dub1 = self.klass("Europe/Dublin") + + self.assertIsNot(la0, la1) + self.assertIs(dub0, dub1) + + def test_clear_cache_two_keys(self): + la0 = self.klass("America/Los_Angeles") + dub0 = self.klass("Europe/Dublin") + tok0 = self.klass("Asia/Tokyo") + + self.klass.clear_cache( + only_keys=["America/Los_Angeles", "Europe/Dublin"] + ) + + la1 = self.klass("America/Los_Angeles") + dub1 = self.klass("Europe/Dublin") + tok1 = self.klass("Asia/Tokyo") + + self.assertIsNot(la0, la1) + self.assertIsNot(dub0, dub1) + self.assertIs(tok0, tok1) + + +class CZoneInfoCacheTest(ZoneInfoCacheTest): + module = c_zoneinfo + + +class ZoneInfoPickleTest(TzPathUserMixin, ZoneInfoTestBase): + module = py_zoneinfo + + def setUp(self): + self.klass.clear_cache() + + with contextlib.ExitStack() as stack: + stack.enter_context(test_support.set_zoneinfo_module(self.module)) + self.addCleanup(stack.pop_all().close) + + super().setUp() + + @property + def zoneinfo_data(self): + return ZONEINFO_DATA + + @property + def tzpath(self): + return 
[self.zoneinfo_data.tzpath] + + def test_cache_hit(self): + zi_in = self.klass("Europe/Dublin") + pkl = pickle.dumps(zi_in) + zi_rt = pickle.loads(pkl) + + with self.subTest(test="Is non-pickled ZoneInfo"): + self.assertIs(zi_in, zi_rt) + + zi_rt2 = pickle.loads(pkl) + with self.subTest(test="Is unpickled ZoneInfo"): + self.assertIs(zi_rt, zi_rt2) + + def test_cache_miss(self): + zi_in = self.klass("Europe/Dublin") + pkl = pickle.dumps(zi_in) + + del zi_in + self.klass.clear_cache() # Induce a cache miss + zi_rt = pickle.loads(pkl) + zi_rt2 = pickle.loads(pkl) + + self.assertIs(zi_rt, zi_rt2) + + def test_no_cache(self): + zi_no_cache = self.klass.no_cache("Europe/Dublin") + + pkl = pickle.dumps(zi_no_cache) + zi_rt = pickle.loads(pkl) + + with self.subTest(test="Not the pickled object"): + self.assertIsNot(zi_rt, zi_no_cache) + + zi_rt2 = pickle.loads(pkl) + with self.subTest(test="Not a second unpickled object"): + self.assertIsNot(zi_rt, zi_rt2) + + zi_cache = self.klass("Europe/Dublin") + with self.subTest(test="Not a cached object"): + self.assertIsNot(zi_rt, zi_cache) + + def test_from_file(self): + key = "Europe/Dublin" + with open(self.zoneinfo_data.path_from_key(key), "rb") as f: + zi_nokey = self.klass.from_file(f) + + f.seek(0) + zi_key = self.klass.from_file(f, key=key) + + test_cases = [ + (zi_key, "ZoneInfo with key"), + (zi_nokey, "ZoneInfo without key"), + ] + + for zi, test_name in test_cases: + with self.subTest(test_name=test_name): + with self.assertRaises(pickle.PicklingError): + pickle.dumps(zi) + + def test_pickle_after_from_file(self): + # This may be a bit of paranoia, but this test is to ensure that no + # global state is maintained in order to handle the pickle cache and + # from_file behavior, and that it is possible to interweave the + # constructors of each of these and pickling/unpickling without issues. 
+ key = "Europe/Dublin" + zi = self.klass(key) + + pkl_0 = pickle.dumps(zi) + zi_rt_0 = pickle.loads(pkl_0) + self.assertIs(zi, zi_rt_0) + + with open(self.zoneinfo_data.path_from_key(key), "rb") as f: + zi_ff = self.klass.from_file(f, key=key) + + pkl_1 = pickle.dumps(zi) + zi_rt_1 = pickle.loads(pkl_1) + self.assertIs(zi, zi_rt_1) + + with self.assertRaises(pickle.PicklingError): + pickle.dumps(zi_ff) + + pkl_2 = pickle.dumps(zi) + zi_rt_2 = pickle.loads(pkl_2) + self.assertIs(zi, zi_rt_2) + + +class CZoneInfoPickleTest(ZoneInfoPickleTest): + module = c_zoneinfo + + +class CallingConventionTest(ZoneInfoTestBase): + """Tests for functions with restricted calling conventions.""" + + module = py_zoneinfo + + @property + def zoneinfo_data(self): + return ZONEINFO_DATA + + def test_from_file(self): + with open(self.zoneinfo_data.path_from_key("UTC"), "rb") as f: + with self.assertRaises(TypeError): + self.klass.from_file(fobj=f) + + def test_clear_cache(self): + with self.assertRaises(TypeError): + self.klass.clear_cache(["UTC"]) + + +class CCallingConventionTest(CallingConventionTest): + module = c_zoneinfo + + +class TzPathTest(TzPathUserMixin, ZoneInfoTestBase): + module = py_zoneinfo + + @staticmethod + @contextlib.contextmanager + def python_tzpath_context(value): + path_var = "PYTHONTZPATH" + try: + with OS_ENV_LOCK: + old_env = os.environ.get(path_var, None) + os.environ[path_var] = value + yield + finally: + if old_env is None: + del os.environ[path_var] + else: + os.environ[path_var] = old_env # pragma: nocover + + def test_env_variable(self): + """Tests that the environment variable works with reset_tzpath.""" + new_paths = [ + ("", []), + ("/etc/zoneinfo", ["/etc/zoneinfo"]), + (f"/a/b/c{os.pathsep}/d/e/f", ["/a/b/c", "/d/e/f"]), + ] + + for new_path_var, expected_result in new_paths: + with self.python_tzpath_context(new_path_var): + with self.subTest(tzpath=new_path_var): + self.module.reset_tzpath() + tzpath = self.module.TZPATH + 
self.assertSequenceEqual(tzpath, expected_result) + + def test_env_variable_relative_paths(self): + test_cases = [ + [("path/to/somewhere",), ()], + [ + ("/usr/share/zoneinfo", "path/to/somewhere",), + ("/usr/share/zoneinfo",), + ], + [("../relative/path",), ()], + [ + ("/usr/share/zoneinfo", "../relative/path",), + ("/usr/share/zoneinfo",), + ], + [("path/to/somewhere", "../relative/path",), ()], + [ + ( + "/usr/share/zoneinfo", + "path/to/somewhere", + "../relative/path", + ), + ("/usr/share/zoneinfo",), + ], + ] + + for input_paths, expected_paths in test_cases: + path_var = os.pathsep.join(input_paths) + with self.python_tzpath_context(path_var): + with self.subTest("warning", path_var=path_var): + # Note: Per PEP 615 the warning is implementation-defined + # behavior, other implementations need not warn. + with self.assertWarns(self.module.InvalidTZPathWarning): + self.module.reset_tzpath() + + tzpath = self.module.TZPATH + with self.subTest("filtered", path_var=path_var): + self.assertSequenceEqual(tzpath, expected_paths) + + def test_reset_tzpath_kwarg(self): + self.module.reset_tzpath(to=["/a/b/c"]) + + self.assertSequenceEqual(self.module.TZPATH, ("/a/b/c",)) + + def test_reset_tzpath_relative_paths(self): + bad_values = [ + ("path/to/somewhere",), + ("/usr/share/zoneinfo", "path/to/somewhere",), + ("../relative/path",), + ("/usr/share/zoneinfo", "../relative/path",), + ("path/to/somewhere", "../relative/path",), + ("/usr/share/zoneinfo", "path/to/somewhere", "../relative/path",), + ] + for input_paths in bad_values: + with self.subTest(input_paths=input_paths): + with self.assertRaises(ValueError): + self.module.reset_tzpath(to=input_paths) + + def test_tzpath_type_error(self): + bad_values = [ + "/etc/zoneinfo:/usr/share/zoneinfo", + b"/etc/zoneinfo:/usr/share/zoneinfo", + 0, + ] + + for bad_value in bad_values: + with self.subTest(value=bad_value): + with self.assertRaises(TypeError): + self.module.reset_tzpath(bad_value) + + def 
test_tzpath_attribute(self): + tzpath_0 = ["/one", "/two"] + tzpath_1 = ["/three"] + + with self.tzpath_context(tzpath_0): + query_0 = self.module.TZPATH + + with self.tzpath_context(tzpath_1): + query_1 = self.module.TZPATH + + self.assertSequenceEqual(tzpath_0, query_0) + self.assertSequenceEqual(tzpath_1, query_1) + + +class CTzPathTest(TzPathTest): + module = c_zoneinfo + + +class TestModule(ZoneInfoTestBase): + module = py_zoneinfo + + def test_getattr_error(self): + with self.assertRaises(AttributeError): + self.module.NOATTRIBUTE + + def test_dir_contains_all(self): + """dir(self.module) should at least contain everything in __all__.""" + module_all_set = set(self.module.__all__) + module_dir_set = set(dir(self.module)) + + difference = module_all_set - module_dir_set + + self.assertFalse(difference) + + def test_dir_unique(self): + """Test that there are no duplicates in dir(self.module)""" + module_dir = dir(self.module) + module_unique = set(module_dir) + + self.assertCountEqual(module_dir, module_unique) + + +class CTestModule(TestModule): + module = c_zoneinfo + + +class ExtensionBuiltTest(unittest.TestCase): + """Smoke test to ensure that the C and Python extensions are both tested. + + Because the intention is for the Python and C versions of ZoneInfo to + behave identically, these tests necessarily rely on implementation details, + so the tests may need to be adjusted if the implementations change. Do not + rely on these tests as an indication of stable properties of these classes. + """ + + def test_cache_location(self): + # The pure Python version stores caches on attributes, but the C + # extension stores them in C globals (at least for now) + self.assertFalse(hasattr(c_zoneinfo.ZoneInfo, "_weak_cache")) + self.assertTrue(hasattr(py_zoneinfo.ZoneInfo, "_weak_cache")) + + def test_gc_tracked(self): + # The pure Python version is tracked by the GC but (for now) the C + # version is not. 
+ import gc + + self.assertTrue(gc.is_tracked(py_zoneinfo.ZoneInfo)) + self.assertFalse(gc.is_tracked(c_zoneinfo.ZoneInfo)) + + + at dataclasses.dataclass(frozen=True) +class ZoneOffset: + tzname: str + utcoffset: timedelta + dst: timedelta = ZERO + + + at dataclasses.dataclass(frozen=True) +class ZoneTransition: + transition: datetime + offset_before: ZoneOffset + offset_after: ZoneOffset + + @property + def transition_utc(self): + return (self.transition - self.offset_before.utcoffset).replace( + tzinfo=timezone.utc + ) + + @property + def fold(self): + """Whether this introduces a fold""" + return self.offset_before.utcoffset > self.offset_after.utcoffset + + @property + def gap(self): + """Whether this introduces a gap""" + return self.offset_before.utcoffset < self.offset_after.utcoffset + + @property + def delta(self): + return self.offset_after.utcoffset - self.offset_before.utcoffset + + @property + def anomaly_start(self): + if self.fold: + return self.transition + self.delta + else: + return self.transition + + @property + def anomaly_end(self): + if not self.fold: + return self.transition + self.delta + else: + return self.transition + + +class ZoneInfoData: + def __init__(self, source_json, tzpath, v1=False): + self.tzpath = pathlib.Path(tzpath) + self.keys = [] + self.v1 = v1 + self._populate_tzpath(source_json) + + def path_from_key(self, key): + return self.tzpath / key + + def _populate_tzpath(self, source_json): + with open(source_json, "rb") as f: + zoneinfo_dict = json.load(f) + + zoneinfo_data = zoneinfo_dict["data"] + + for key, value in zoneinfo_data.items(): + self.keys.append(key) + raw_data = self._decode_text(value) + + if self.v1: + data = self._convert_to_v1(raw_data) + else: + data = raw_data + + destination = self.path_from_key(key) + destination.parent.mkdir(exist_ok=True, parents=True) + with open(destination, "wb") as f: + f.write(data) + + def _decode_text(self, contents): + raw_data = b"".join(map(str.encode, contents)) + decoded 
= base64.b85decode(raw_data) + + return lzma.decompress(decoded) + + def _convert_to_v1(self, contents): + assert contents[0:4] == b"TZif", "Invalid TZif data found!" + version = int(contents[4:5]) + + header_start = 4 + 16 + header_end = header_start + 24 # 6l == 24 bytes + assert version >= 2, "Version 1 file found: no conversion necessary" + isutcnt, isstdcnt, leapcnt, timecnt, typecnt, charcnt = struct.unpack( + ">6l", contents[header_start:header_end] + ) + + file_size = ( + timecnt * 5 + + typecnt * 6 + + charcnt + + leapcnt * 8 + + isstdcnt + + isutcnt + ) + file_size += header_end + out = b"TZif" + b"\x00" + contents[5:file_size] + + assert ( + contents[file_size : (file_size + 4)] == b"TZif" + ), "Version 2 file not truncated at Version 2 header" + + return out + + +class ZoneDumpData: + @classmethod + def transition_keys(cls): + return cls._get_zonedump().keys() + + @classmethod + def load_transition_examples(cls, key): + return cls._get_zonedump()[key] + + @classmethod + def fixed_offset_zones(cls): + if not cls._FIXED_OFFSET_ZONES: + cls._populate_fixed_offsets() + + return cls._FIXED_OFFSET_ZONES.items() + + @classmethod + def _get_zonedump(cls): + if not cls._ZONEDUMP_DATA: + cls._populate_zonedump_data() + return cls._ZONEDUMP_DATA + + @classmethod + def _populate_fixed_offsets(cls): + cls._FIXED_OFFSET_ZONES = { + "UTC": ZoneOffset("UTC", ZERO, ZERO), + } + + @classmethod + def _populate_zonedump_data(cls): + def _Africa_Abidjan(): + LMT = ZoneOffset("LMT", timedelta(seconds=-968)) + GMT = ZoneOffset("GMT", ZERO) + + return [ + ZoneTransition(datetime(1912, 1, 1), LMT, GMT), + ] + + def _Africa_Casablanca(): + P00_s = ZoneOffset("+00", ZERO, ZERO) + P01_d = ZoneOffset("+01", ONE_H, ONE_H) + P00_d = ZoneOffset("+00", ZERO, -ONE_H) + P01_s = ZoneOffset("+01", ONE_H, ZERO) + + return [ + # Morocco sometimes pauses DST during Ramadan + ZoneTransition(datetime(2018, 3, 25, 2), P00_s, P01_d), + ZoneTransition(datetime(2018, 5, 13, 3), P01_d, P00_s), + 
ZoneTransition(datetime(2018, 6, 17, 2), P00_s, P01_d), + # On October 28th Morocco set standard time to +01, + # with negative DST only during Ramadan + ZoneTransition(datetime(2018, 10, 28, 3), P01_d, P01_s), + ZoneTransition(datetime(2019, 5, 5, 3), P01_s, P00_d), + ZoneTransition(datetime(2019, 6, 9, 2), P00_d, P01_s), + ] + + def _America_Los_Angeles(): + LMT = ZoneOffset("LMT", timedelta(seconds=-28378), ZERO) + PST = ZoneOffset("PST", timedelta(hours=-8), ZERO) + PDT = ZoneOffset("PDT", timedelta(hours=-7), ONE_H) + PWT = ZoneOffset("PWT", timedelta(hours=-7), ONE_H) + PPT = ZoneOffset("PPT", timedelta(hours=-7), ONE_H) + + return [ + ZoneTransition(datetime(1883, 11, 18, 12, 7, 2), LMT, PST), + ZoneTransition(datetime(1918, 3, 31, 2), PST, PDT), + ZoneTransition(datetime(1918, 3, 31, 2), PST, PDT), + ZoneTransition(datetime(1918, 10, 27, 2), PDT, PST), + # Transition to Pacific War Time + ZoneTransition(datetime(1942, 2, 9, 2), PST, PWT), + # Transition from Pacific War Time to Pacific Peace Time + ZoneTransition(datetime(1945, 8, 14, 16), PWT, PPT), + ZoneTransition(datetime(1945, 9, 30, 2), PPT, PST), + ZoneTransition(datetime(2015, 3, 8, 2), PST, PDT), + ZoneTransition(datetime(2015, 11, 1, 2), PDT, PST), + # After 2038: Rules continue indefinitely + ZoneTransition(datetime(2450, 3, 13, 2), PST, PDT), + ZoneTransition(datetime(2450, 11, 6, 2), PDT, PST), + ] + + def _America_Santiago(): + LMT = ZoneOffset("LMT", timedelta(seconds=-16966), ZERO) + SMT = ZoneOffset("SMT", timedelta(seconds=-16966), ZERO) + N05 = ZoneOffset("-05", timedelta(seconds=-18000), ZERO) + N04 = ZoneOffset("-04", timedelta(seconds=-14400), ZERO) + N03 = ZoneOffset("-03", timedelta(seconds=-10800), ONE_H) + + return [ + ZoneTransition(datetime(1890, 1, 1), LMT, SMT), + ZoneTransition(datetime(1910, 1, 10), SMT, N05), + ZoneTransition(datetime(1916, 7, 1), N05, SMT), + ZoneTransition(datetime(2008, 3, 30), N03, N04), + ZoneTransition(datetime(2008, 10, 12), N04, N03), + 
ZoneTransition(datetime(2040, 4, 8), N03, N04), + ZoneTransition(datetime(2040, 9, 2), N04, N03), + ] + + def _Asia_Tokyo(): + JST = ZoneOffset("JST", timedelta(seconds=32400), ZERO) + JDT = ZoneOffset("JDT", timedelta(seconds=36000), ONE_H) + + # Japan had DST from 1948 to 1951, and it was unusual in that + # the transition from DST to STD occurred at 25:00, and is + # denominated as such in the time zone database + return [ + ZoneTransition(datetime(1948, 5, 2), JST, JDT), + ZoneTransition(datetime(1948, 9, 12, 1), JDT, JST), + ZoneTransition(datetime(1951, 9, 9, 1), JDT, JST), + ] + + def _Australia_Sydney(): + LMT = ZoneOffset("LMT", timedelta(seconds=36292), ZERO) + AEST = ZoneOffset("AEST", timedelta(seconds=36000), ZERO) + AEDT = ZoneOffset("AEDT", timedelta(seconds=39600), ONE_H) + + return [ + ZoneTransition(datetime(1895, 2, 1), LMT, AEST), + ZoneTransition(datetime(1917, 1, 1, 0, 1), AEST, AEDT), + ZoneTransition(datetime(1917, 3, 25, 2), AEDT, AEST), + ZoneTransition(datetime(2012, 4, 1, 3), AEDT, AEST), + ZoneTransition(datetime(2012, 10, 7, 2), AEST, AEDT), + ZoneTransition(datetime(2040, 4, 1, 3), AEDT, AEST), + ZoneTransition(datetime(2040, 10, 7, 2), AEST, AEDT), + ] + + def _Europe_Dublin(): + LMT = ZoneOffset("LMT", timedelta(seconds=-1500), ZERO) + DMT = ZoneOffset("DMT", timedelta(seconds=-1521), ZERO) + IST_0 = ZoneOffset("IST", timedelta(seconds=2079), ONE_H) + GMT_0 = ZoneOffset("GMT", ZERO, ZERO) + BST = ZoneOffset("BST", ONE_H, ONE_H) + GMT_1 = ZoneOffset("GMT", ZERO, -ONE_H) + IST_1 = ZoneOffset("IST", ONE_H, ZERO) + + return [ + ZoneTransition(datetime(1880, 8, 2, 0), LMT, DMT), + ZoneTransition(datetime(1916, 5, 21, 2), DMT, IST_0), + ZoneTransition(datetime(1916, 10, 1, 3), IST_0, GMT_0), + ZoneTransition(datetime(1917, 4, 8, 2), GMT_0, BST), + ZoneTransition(datetime(2016, 3, 27, 1), GMT_1, IST_1), + ZoneTransition(datetime(2016, 10, 30, 2), IST_1, GMT_1), + ZoneTransition(datetime(2487, 3, 30, 1), GMT_1, IST_1), + 
ZoneTransition(datetime(2487, 10, 26, 2), IST_1, GMT_1), + ] + + def _Europe_Lisbon(): + WET = ZoneOffset("WET", ZERO, ZERO) + WEST = ZoneOffset("WEST", ONE_H, ONE_H) + CET = ZoneOffset("CET", ONE_H, ZERO) + CEST = ZoneOffset("CEST", timedelta(seconds=7200), ONE_H) + + return [ + ZoneTransition(datetime(1992, 3, 29, 1), WET, WEST), + ZoneTransition(datetime(1992, 9, 27, 2), WEST, CET), + ZoneTransition(datetime(1993, 3, 28, 2), CET, CEST), + ZoneTransition(datetime(1993, 9, 26, 3), CEST, CET), + ZoneTransition(datetime(1996, 3, 31, 2), CET, WEST), + ZoneTransition(datetime(1996, 10, 27, 2), WEST, WET), + ] + + def _Europe_London(): + LMT = ZoneOffset("LMT", timedelta(seconds=-75), ZERO) + GMT = ZoneOffset("GMT", ZERO, ZERO) + BST = ZoneOffset("BST", ONE_H, ONE_H) + + return [ + ZoneTransition(datetime(1847, 12, 1), LMT, GMT), + ZoneTransition(datetime(2005, 3, 27, 1), GMT, BST), + ZoneTransition(datetime(2005, 10, 30, 2), BST, GMT), + ZoneTransition(datetime(2043, 3, 29, 1), GMT, BST), + ZoneTransition(datetime(2043, 10, 25, 2), BST, GMT), + ] + + def _Pacific_Kiritimati(): + LMT = ZoneOffset("LMT", timedelta(seconds=-37760), ZERO) + N1040 = ZoneOffset("-1040", timedelta(seconds=-38400), ZERO) + N10 = ZoneOffset("-10", timedelta(seconds=-36000), ZERO) + P14 = ZoneOffset("+14", timedelta(seconds=50400), ZERO) + + # This is literally every transition in Christmas Island history + return [ + ZoneTransition(datetime(1901, 1, 1), LMT, N1040), + ZoneTransition(datetime(1979, 10, 1), N1040, N10), + # They skipped December 31, 1994 + ZoneTransition(datetime(1994, 12, 31), N10, P14), + ] + + cls._ZONEDUMP_DATA = { + "Africa/Abidjan": _Africa_Abidjan(), + "Africa/Casablanca": _Africa_Casablanca(), + "America/Los_Angeles": _America_Los_Angeles(), + "America/Santiago": _America_Santiago(), + "Australia/Sydney": _Australia_Sydney(), + "Asia/Tokyo": _Asia_Tokyo(), + "Europe/Dublin": _Europe_Dublin(), + "Europe/Lisbon": _Europe_Lisbon(), + "Europe/London": _Europe_London(), + 
"Pacific/Kiritimati": _Pacific_Kiritimati(), + } + + _ZONEDUMP_DATA = None + _FIXED_OFFSET_ZONES = None diff --git a/Lib/zoneinfo/__init__.py b/Lib/zoneinfo/__init__.py new file mode 100644 index 0000000000000..81a2d5ea97be0 --- /dev/null +++ b/Lib/zoneinfo/__init__.py @@ -0,0 +1,29 @@ +__all__ = [ + "ZoneInfo", + "reset_tzpath", + "TZPATH", + "ZoneInfoNotFoundError", + "InvalidTZPathWarning", +] + +from . import _tzpath +from ._common import ZoneInfoNotFoundError + +try: + from _zoneinfo import ZoneInfo +except ImportError: # pragma: nocover + from ._zoneinfo import ZoneInfo + +reset_tzpath = _tzpath.reset_tzpath +InvalidTZPathWarning = _tzpath.InvalidTZPathWarning + + +def __getattr__(name): + if name == "TZPATH": + return _tzpath.TZPATH + else: + raise AttributeError(f"module {__name__!r} has no attribute {name!r}") + + +def __dir__(): + return sorted(list(globals()) + ["TZPATH"]) diff --git a/Lib/zoneinfo/_common.py b/Lib/zoneinfo/_common.py new file mode 100644 index 0000000000000..3d35d4f4b463f --- /dev/null +++ b/Lib/zoneinfo/_common.py @@ -0,0 +1,166 @@ +import struct + + +def load_tzdata(key): + import importlib.resources + + components = key.split("/") + package_name = ".".join(["tzdata.zoneinfo"] + components[:-1]) + resource_name = components[-1] + + try: + return importlib.resources.open_binary(package_name, resource_name) + except (ImportError, FileNotFoundError, UnicodeEncodeError): + # There are three types of exception that can be raised that all amount + # to "we cannot find this key": + # + # ImportError: If package_name doesn't exist (e.g. if tzdata is not + # installed, or if there's an error in the folder name like + # Amrica/New_York) + # FileNotFoundError: If resource_name doesn't exist in the package + # (e.g. Europe/Krasnoy) + # UnicodeEncodeError: If package_name or resource_name are not UTF-8, + # such as keys containing a surrogate character. 
+ raise ZoneInfoNotFoundError(f"No time zone found with key {key}") + + +def load_data(fobj): + header = _TZifHeader.from_file(fobj) + + if header.version == 1: + time_size = 4 + time_type = "l" + else: + # Version 2+ has 64-bit integer transition times + time_size = 8 + time_type = "q" + + # Version 2+ also starts with a Version 1 header and data, which + # we need to skip now + skip_bytes = ( + header.timecnt * 5 # Transition times and types + + header.typecnt * 6 # Local time type records + + header.charcnt # Time zone designations + + header.leapcnt * 8 # Leap second records + + header.isstdcnt # Standard/wall indicators + + header.isutcnt # UT/local indicators + ) + + fobj.seek(skip_bytes, 1) + + # Now we need to read the second header, which is not the same + # as the first + header = _TZifHeader.from_file(fobj) + + typecnt = header.typecnt + timecnt = header.timecnt + charcnt = header.charcnt + + # The data portion starts with timecnt transitions and indices + if timecnt: + trans_list_utc = struct.unpack( + f">{timecnt}{time_type}", fobj.read(timecnt * time_size) + ) + trans_idx = struct.unpack(f">{timecnt}B", fobj.read(timecnt)) + else: + trans_list_utc = () + trans_idx = () + + # Read the ttinfo struct, (utoff, isdst, abbrind) + if typecnt: + utcoff, isdst, abbrind = zip( + *(struct.unpack(">lbb", fobj.read(6)) for i in range(typecnt)) + ) + else: + utcoff = () + isdst = () + abbrind = () + + # Now read the abbreviations. They are null-terminated strings, indexed + # not by position in the array but by position in the unsplit + # abbreviation string. I suppose this makes more sense in C, which uses + # null to terminate the strings, but it's inconvenient here... 
+ char_total = 0 + abbr_vals = {} + abbr_chars = fobj.read(charcnt) + + def get_abbr(idx): + # Gets a string starting at idx and running until the next \x00 + # + # We cannot pre-populate abbr_vals by splitting on \x00 because there + # are some zones that use subsets of longer abbreviations, like so: + # + # LMT\x00AHST\x00HDT\x00 + # + # Where the idx to abbr mapping should be: + # + # {0: "LMT", 4: "AHST", 5: "HST", 9: "HDT"} + if idx not in abbr_vals: + span_end = abbr_chars.find(b"\x00", idx) + abbr_vals[idx] = abbr_chars[idx:span_end].decode() + + return abbr_vals[idx] + + abbr = tuple(get_abbr(idx) for idx in abbrind) + + # The remainder of the file consists of leap seconds (currently unused) and + # the standard/wall and ut/local indicators, which are metadata we don't need. + # In version 2 files, we need to skip the unnecessary data to get at the TZ string: + if header.version >= 2: + # Each leap second record has size (time_size + 4) + skip_bytes = header.isutcnt + header.isstdcnt + header.leapcnt * 12 + fobj.seek(skip_bytes, 1) + + c = fobj.read(1) # Should be \n + assert c == b"\n", c + + tz_bytes = b"" + while (c := fobj.read(1)) != b"\n": + tz_bytes += c + + tz_str = tz_bytes + else: + tz_str = None + + return trans_idx, trans_list_utc, utcoff, isdst, abbr, tz_str + + +class _TZifHeader: + __slots__ = [ + "version", + "isutcnt", + "isstdcnt", + "leapcnt", + "timecnt", + "typecnt", + "charcnt", + ] + + def __init__(self, *args): + assert len(self.__slots__) == len(args) + for attr, val in zip(self.__slots__, args): + setattr(self, attr, val) + + @classmethod + def from_file(cls, stream): + # The header starts with a 4-byte "magic" value + if stream.read(4) != b"TZif": + raise ValueError("Invalid TZif file: magic not found") + + _version = stream.read(1) + if _version == b"\x00": + version = 1 + else: + version = int(_version) + stream.read(15) + + args = (version,) + + # Slots are defined in the order that the bytes are arranged + args = args + 
struct.unpack(">6l", stream.read(24)) + + return cls(*args) + + +class ZoneInfoNotFoundError(KeyError): + """Exception raised when a ZoneInfo key is not found.""" diff --git a/Lib/zoneinfo/_tzpath.py b/Lib/zoneinfo/_tzpath.py new file mode 100644 index 0000000000000..8cff0b171bf32 --- /dev/null +++ b/Lib/zoneinfo/_tzpath.py @@ -0,0 +1,110 @@ +import os +import sys +import sysconfig + + +def reset_tzpath(to=None): + global TZPATH + + tzpaths = to + if tzpaths is not None: + if isinstance(tzpaths, (str, bytes)): + raise TypeError( + f"tzpaths must be a list or tuple, " + + f"not {type(tzpaths)}: {tzpaths!r}" + ) + elif not all(map(os.path.isabs, tzpaths)): + raise ValueError(_get_invalid_paths_message(tzpaths)) + base_tzpath = tzpaths + else: + env_var = os.environ.get("PYTHONTZPATH", None) + if env_var is not None: + base_tzpath = _parse_python_tzpath(env_var) + else: + base_tzpath = _parse_python_tzpath( + sysconfig.get_config_var("TZPATH") + ) + + TZPATH = tuple(base_tzpath) + + +def _parse_python_tzpath(env_var): + if not env_var: + return () + + raw_tzpath = env_var.split(os.pathsep) + new_tzpath = tuple(filter(os.path.isabs, raw_tzpath)) + + # If anything has been filtered out, we will warn about it + if len(new_tzpath) != len(raw_tzpath): + import warnings + + msg = _get_invalid_paths_message(raw_tzpath) + + warnings.warn( + "Invalid paths specified in PYTHONTZPATH environment variable." 
+ + msg, + InvalidTZPathWarning, + ) + + return new_tzpath + + +def _get_invalid_paths_message(tzpaths): + invalid_paths = (path for path in tzpaths if not os.path.isabs(path)) + + prefix = "\n " + indented_str = prefix + prefix.join(invalid_paths) + + return ( + "Paths should be absolute but found the following relative paths:" + + indented_str + ) + + +def find_tzfile(key): + """Retrieve the path to a TZif file from a key.""" + _validate_tzfile_path(key) + for search_path in TZPATH: + filepath = os.path.join(search_path, key) + if os.path.isfile(filepath): + return filepath + + return None + + +_TEST_PATH = os.path.normpath(os.path.join("_", "_"))[:-1] + + +def _validate_tzfile_path(path, _base=_TEST_PATH): + if os.path.isabs(path): + raise ValueError( + f"ZoneInfo keys may not be absolute paths, got: {path}" + ) + + # We only care about the kinds of path normalizations that would change the + # length of the key - e.g. a/../b -> a/b, or a/b/ -> a/b. On Windows, + # normpath will also change from a/b to a\b, but that would still preserve + # the length. + new_path = os.path.normpath(path) + if len(new_path) != len(path): + raise ValueError( + f"ZoneInfo keys must be normalized relative paths, got: {path}" + ) + + resolved = os.path.normpath(os.path.join(_base, new_path)) + if not resolved.startswith(_base): + raise ValueError( + f"ZoneInfo keys must refer to subdirectories of TZPATH, got: {path}" + ) + + +del _TEST_PATH + + +class InvalidTZPathWarning(RuntimeWarning): + """Warning raised if an invalid path is specified in PYTHONTZPATH.""" + + +TZPATH = () +reset_tzpath() diff --git a/Lib/zoneinfo/_zoneinfo.py b/Lib/zoneinfo/_zoneinfo.py new file mode 100644 index 0000000000000..69133ae80a493 --- /dev/null +++ b/Lib/zoneinfo/_zoneinfo.py @@ -0,0 +1,755 @@ +import bisect +import calendar +import collections +import functools +import os +import re +import struct +import sys +import weakref +from datetime import datetime, timedelta, timezone, tzinfo + +from . 
import _common, _tzpath + +EPOCH = datetime(1970, 1, 1) +EPOCHORDINAL = datetime(1970, 1, 1).toordinal() + +# It is relatively expensive to construct new timedelta objects, and in most +# cases we're looking at the same deltas, like integer numbers of hours, etc. +# To improve speed and memory use, we'll keep a dictionary with references +# to the ones we've already used so far. +# +# Loading every time zone in the 2020a version of the time zone database +# requires 447 timedeltas, which requires approximately the amount of space +# that ZoneInfo("America/New_York") with 236 transitions takes up, so we will +# set the cache size to 512 so that in the common case we always get cache +# hits, but specifically crafted ZoneInfo objects don't leak arbitrary amounts +# of memory. + at functools.lru_cache(maxsize=512) +def _load_timedelta(seconds): + return timedelta(seconds=seconds) + + +class ZoneInfo(tzinfo): + _strong_cache_size = 8 + _strong_cache = collections.OrderedDict() + _weak_cache = weakref.WeakValueDictionary() + __module__ = "zoneinfo" + + def __init_subclass__(cls): + cls._strong_cache = collections.OrderedDict() + cls._weak_cache = weakref.WeakValueDictionary() + + def __new__(cls, key): + instance = cls._weak_cache.get(key, None) + if instance is None: + instance = cls._weak_cache.setdefault(key, cls._new_instance(key)) + instance._from_cache = True + + # Update the "strong" cache + cls._strong_cache[key] = cls._strong_cache.pop(key, instance) + + if len(cls._strong_cache) > cls._strong_cache_size: + cls._strong_cache.popitem(last=False) + + return instance + + @classmethod + def no_cache(cls, key): + obj = cls._new_instance(key) + obj._from_cache = False + + return obj + + @classmethod + def _new_instance(cls, key): + obj = super().__new__(cls) + obj._key = key + obj._file_path = obj._find_tzfile(key) + + if obj._file_path is not None: + file_obj = open(obj._file_path, "rb") + else: + file_obj = _common.load_tzdata(key) + + with file_obj as f: + 
obj._load_file(f) + + return obj + + @classmethod + def from_file(cls, fobj, /, key=None): + obj = super().__new__(cls) + obj._key = key + obj._file_path = None + obj._load_file(fobj) + obj._file_repr = repr(fobj) + + # Disable pickling for objects created from files + obj.__reduce__ = obj._file_reduce + + return obj + + @classmethod + def clear_cache(cls, *, only_keys=None): + if only_keys is not None: + for key in only_keys: + cls._weak_cache.pop(key, None) + cls._strong_cache.pop(key, None) + + else: + cls._weak_cache.clear() + cls._strong_cache.clear() + + @property + def key(self): + return self._key + + def utcoffset(self, dt): + return self._find_trans(dt).utcoff + + def dst(self, dt): + return self._find_trans(dt).dstoff + + def tzname(self, dt): + return self._find_trans(dt).tzname + + def fromutc(self, dt): + """Convert from datetime in UTC to datetime in local time""" + + if not isinstance(dt, datetime): + raise TypeError("fromutc() requires a datetime argument") + if dt.tzinfo is not self: + raise ValueError("dt.tzinfo is not self") + + timestamp = self._get_local_timestamp(dt) + num_trans = len(self._trans_utc) + + if num_trans >= 1 and timestamp < self._trans_utc[0]: + tti = self._tti_before + fold = 0 + elif ( + num_trans == 0 or timestamp > self._trans_utc[-1] + ) and not isinstance(self._tz_after, _ttinfo): + tti, fold = self._tz_after.get_trans_info_fromutc( + timestamp, dt.year + ) + elif num_trans == 0: + tti = self._tz_after + fold = 0 + else: + idx = bisect.bisect_right(self._trans_utc, timestamp) + + if num_trans > 1 and timestamp >= self._trans_utc[1]: + tti_prev, tti = self._ttinfos[idx - 2 : idx] + elif timestamp > self._trans_utc[-1]: + tti_prev = self._ttinfos[-1] + tti = self._tz_after + else: + tti_prev = self._tti_before + tti = self._ttinfos[0] + + # Detect fold + shift = tti_prev.utcoff - tti.utcoff + fold = shift.total_seconds() > timestamp - self._trans_utc[idx - 1] + dt += tti.utcoff + if fold: + return dt.replace(fold=1) + else: 
+ return dt + + def _find_trans(self, dt): + if dt is None: + if self._fixed_offset: + return self._tz_after + else: + return _NO_TTINFO + + ts = self._get_local_timestamp(dt) + + lt = self._trans_local[dt.fold] + + num_trans = len(lt) + + if num_trans and ts < lt[0]: + return self._tti_before + elif not num_trans or ts > lt[-1]: + if isinstance(self._tz_after, _TZStr): + return self._tz_after.get_trans_info(ts, dt.year, dt.fold) + else: + return self._tz_after + else: + # idx is the transition that occurs after this timestamp, so we + # subtract off 1 to get the current ttinfo + idx = bisect.bisect_right(lt, ts) - 1 + assert idx >= 0 + return self._ttinfos[idx] + + def _get_local_timestamp(self, dt): + return ( + (dt.toordinal() - EPOCHORDINAL) * 86400 + + dt.hour * 3600 + + dt.minute * 60 + + dt.second + ) + + def __str__(self): + if self._key is not None: + return f"{self._key}" + else: + return repr(self) + + def __repr__(self): + if self._key is not None: + return f"{self.__class__.__name__}(key={self._key!r})" + else: + return f"{self.__class__.__name__}.from_file({self._file_repr})" + + def __reduce__(self): + return (self.__class__._unpickle, (self._key, self._from_cache)) + + def _file_reduce(self): + import pickle + + raise pickle.PicklingError( + "Cannot pickle a ZoneInfo file created from a file stream." 
+ ) + + @classmethod + def _unpickle(cls, key, from_cache, /): + if from_cache: + return cls(key) + else: + return cls.no_cache(key) + + def _find_tzfile(self, key): + return _tzpath.find_tzfile(key) + + def _load_file(self, fobj): + # Retrieve all the data as it exists in the zoneinfo file + trans_idx, trans_utc, utcoff, isdst, abbr, tz_str = _common.load_data( + fobj + ) + + # Infer the DST offsets (needed for .dst()) from the data + dstoff = self._utcoff_to_dstoff(trans_idx, utcoff, isdst) + + # Convert all the transition times (UTC) into "seconds since 1970-01-01 local time" + trans_local = self._ts_to_local(trans_idx, trans_utc, utcoff) + + # Construct `_ttinfo` objects for each transition in the file + _ttinfo_list = [ + _ttinfo( + _load_timedelta(utcoffset), _load_timedelta(dstoffset), tzname + ) + for utcoffset, dstoffset, tzname in zip(utcoff, dstoff, abbr) + ] + + self._trans_utc = trans_utc + self._trans_local = trans_local + self._ttinfos = [_ttinfo_list[idx] for idx in trans_idx] + + # Find the first non-DST transition + for i in range(len(isdst)): + if not isdst[i]: + self._tti_before = _ttinfo_list[i] + break + else: + if self._ttinfos: + self._tti_before = self._ttinfos[0] + else: + self._tti_before = None + + # Set the "fallback" time zone + if tz_str is not None and tz_str != b"": + self._tz_after = _parse_tz_str(tz_str.decode()) + else: + if not self._ttinfos and not _ttinfo_list: + raise ValueError("No time zone information found.") + + if self._ttinfos: + self._tz_after = self._ttinfos[-1] + else: + self._tz_after = _ttinfo_list[-1] + + # Determine if this is a "fixed offset" zone, meaning that the output + # of the utcoffset, dst and tzname functions does not depend on the + # specific datetime passed. + # + # We make three simplifying assumptions here: + # + # 1. 
If _tz_after is not a _ttinfo, it has transitions that might + # actually occur (it is possible to construct TZ strings that + # specify STD and DST but no transitions ever occur, such as + # AAA0BBB,0/0,J365/25). + # 2. If _ttinfo_list contains more than one _ttinfo object, the objects + # represent different offsets. + # 3. _ttinfo_list contains no unused _ttinfos (in which case an + # otherwise fixed-offset zone with extra _ttinfos defined may + # appear to *not* be a fixed offset zone). + # + # Violations to these assumptions would be fairly exotic, and exotic + # zones should almost certainly not be used with datetime.time (the + # only thing that would be affected by this). + if len(_ttinfo_list) > 1 or not isinstance(self._tz_after, _ttinfo): + self._fixed_offset = False + elif not _ttinfo_list: + self._fixed_offset = True + else: + self._fixed_offset = _ttinfo_list[0] == self._tz_after + + @staticmethod + def _utcoff_to_dstoff(trans_idx, utcoffsets, isdsts): + # Now we must transform our ttis and abbrs into `_ttinfo` objects, + # but there is an issue: .dst() must return a timedelta with the + # difference between utcoffset() and the "standard" offset, but + # the "base offset" and "DST offset" are not encoded in the file; + # we can infer what they are from the isdst flag, but it is not + # sufficient to just look at the last standard offset, because + # occasionally countries will shift both DST offset and base offset. + + typecnt = len(isdsts) + dstoffs = [0] * typecnt # Provisionally assign all to 0. 
+ dst_cnt = sum(isdsts) + dst_found = 0 + + for i in range(1, len(trans_idx)): + if dst_cnt == dst_found: + break + + idx = trans_idx[i] + + dst = isdsts[idx] + + # We're only going to look at daylight saving time + if not dst: + continue + + # Skip any offsets that have already been assigned + if dstoffs[idx] != 0: + continue + + dstoff = 0 + utcoff = utcoffsets[idx] + + comp_idx = trans_idx[i - 1] + + if not isdsts[comp_idx]: + dstoff = utcoff - utcoffsets[comp_idx] + + if not dstoff and idx < (typecnt - 1): + comp_idx = trans_idx[i + 1] + + # If the following transition is also DST and we couldn't + # find the DST offset by this point, we're going to have to + # skip it and hope this transition gets assigned later + if isdsts[comp_idx]: + continue + + dstoff = utcoff - utcoffsets[comp_idx] + + if dstoff: + dst_found += 1 + dstoffs[idx] = dstoff + else: + # If we didn't find a valid value for a given index, we'll end up + # with dstoff = 0 for something where `isdst=1`. This is obviously + # wrong - one hour will be a much better guess than 0 + for idx in range(typecnt): + if not dstoffs[idx] and isdsts[idx]: + dstoffs[idx] = 3600 + + return dstoffs + + @staticmethod + def _ts_to_local(trans_idx, trans_list_utc, utcoffsets): + """Generate number of seconds since 1970 *in the local time*. 
+ + This is necessary to easily find the transition times in local time""" + if not trans_list_utc: + return [[], []] + + # Start with the timestamps and modify in-place + trans_list_wall = [list(trans_list_utc), list(trans_list_utc)] + + if len(utcoffsets) > 1: + offset_0 = utcoffsets[0] + offset_1 = utcoffsets[trans_idx[0]] + if offset_1 > offset_0: + offset_1, offset_0 = offset_0, offset_1 + else: + offset_0 = offset_1 = utcoffsets[0] + + trans_list_wall[0][0] += offset_0 + trans_list_wall[1][0] += offset_1 + + for i in range(1, len(trans_idx)): + offset_0 = utcoffsets[trans_idx[i - 1]] + offset_1 = utcoffsets[trans_idx[i]] + + if offset_1 > offset_0: + offset_1, offset_0 = offset_0, offset_1 + + trans_list_wall[0][i] += offset_0 + trans_list_wall[1][i] += offset_1 + + return trans_list_wall + + +class _ttinfo: + __slots__ = ["utcoff", "dstoff", "tzname"] + + def __init__(self, utcoff, dstoff, tzname): + self.utcoff = utcoff + self.dstoff = dstoff + self.tzname = tzname + + def __eq__(self, other): + return ( + self.utcoff == other.utcoff + and self.dstoff == other.dstoff + and self.tzname == other.tzname + ) + + def __repr__(self): # pragma: nocover + return ( + f"{self.__class__.__name__}" + + f"({self.utcoff}, {self.dstoff}, {self.tzname})" + ) + + +_NO_TTINFO = _ttinfo(None, None, None) + + +class _TZStr: + __slots__ = ( + "std", + "dst", + "start", + "end", + "get_trans_info", + "get_trans_info_fromutc", + "dst_diff", + ) + + def __init__( + self, std_abbr, std_offset, dst_abbr, dst_offset, start=None, end=None + ): + self.dst_diff = dst_offset - std_offset + std_offset = _load_timedelta(std_offset) + self.std = _ttinfo( + utcoff=std_offset, dstoff=_load_timedelta(0), tzname=std_abbr + ) + + self.start = start + self.end = end + + dst_offset = _load_timedelta(dst_offset) + delta = _load_timedelta(self.dst_diff) + self.dst = _ttinfo(utcoff=dst_offset, dstoff=delta, tzname=dst_abbr) + + # These are assertions because the constructor should only be called + # 
by functions that would fail before passing start or end + assert start is not None, "No transition start specified" + assert end is not None, "No transition end specified" + + self.get_trans_info = self._get_trans_info + self.get_trans_info_fromutc = self._get_trans_info_fromutc + + def transitions(self, year): + start = self.start.year_to_epoch(year) + end = self.end.year_to_epoch(year) + return start, end + + def _get_trans_info(self, ts, year, fold): + """Get the information about the current transition - tti""" + start, end = self.transitions(year) + + # With fold = 0, the period (denominated in local time) with the + # smaller offset starts at the end of the gap and ends at the end of + # the fold; with fold = 1, it runs from the start of the gap to the + # beginning of the fold. + # + # So in order to determine the DST boundaries we need to know both + # the fold and whether DST is positive or negative (rare), and it + # turns out that this boils down to fold XOR is_positive. + if fold == (self.dst_diff >= 0): + end -= self.dst_diff + else: + start += self.dst_diff + + if start < end: + isdst = start <= ts < end + else: + isdst = not (end <= ts < start) + + return self.dst if isdst else self.std + + def _get_trans_info_fromutc(self, ts, year): + start, end = self.transitions(year) + start -= self.std.utcoff.total_seconds() + end -= self.dst.utcoff.total_seconds() + + if start < end: + isdst = start <= ts < end + else: + isdst = not (end <= ts < start) + + # For positive DST, the ambiguous period is one dst_diff after the end + # of DST; for negative DST, the ambiguous period is one dst_diff before + # the start of DST. 
+ if self.dst_diff > 0: + ambig_start = end + ambig_end = end + self.dst_diff + else: + ambig_start = start + ambig_end = start - self.dst_diff + + fold = ambig_start <= ts < ambig_end + + return (self.dst if isdst else self.std, fold) + + +def _post_epoch_days_before_year(year): + """Get the number of days between 1970-01-01 and YEAR-01-01""" + y = year - 1 + return y * 365 + y // 4 - y // 100 + y // 400 - EPOCHORDINAL + + +class _DayOffset: + __slots__ = ["d", "julian", "hour", "minute", "second"] + + def __init__(self, d, julian, hour=2, minute=0, second=0): + if not (0 + julian) <= d <= 365: + min_day = 0 + julian + raise ValueError(f"d must be in [{min_day}, 365], not: {d}") + + self.d = d + self.julian = julian + self.hour = hour + self.minute = minute + self.second = second + + def year_to_epoch(self, year): + days_before_year = _post_epoch_days_before_year(year) + + d = self.d + if self.julian and d >= 59 and calendar.isleap(year): + d += 1 + + epoch = (days_before_year + d) * 86400 + epoch += self.hour * 3600 + self.minute * 60 + self.second + + return epoch + + +class _CalendarOffset: + __slots__ = ["m", "w", "d", "hour", "minute", "second"] + + _DAYS_BEFORE_MONTH = ( + -1, + 0, + 31, + 59, + 90, + 120, + 151, + 181, + 212, + 243, + 273, + 304, + 334, + ) + + def __init__(self, m, w, d, hour=2, minute=0, second=0): + if not 0 < m <= 12: + raise ValueError("m must be in (0, 12]") + + if not 0 < w <= 5: + raise ValueError("w must be in (0, 5]") + + if not 0 <= d <= 6: + raise ValueError("d must be in [0, 6]") + + self.m = m + self.w = w + self.d = d + self.hour = hour + self.minute = minute + self.second = second + + @classmethod + def _ymd2ord(cls, year, month, day): + return ( + _post_epoch_days_before_year(year) + + cls._DAYS_BEFORE_MONTH[month] + + (month > 2 and calendar.isleap(year)) + + day + ) + + # TODO: These are not actually epoch dates as they are expressed in local time + def year_to_epoch(self, year): + """Calculates the datetime of the 
occurrence from the year""" + # We know year and month, we need to convert w, d into day of month + # + # Week 1 is the first week in which day `d` (where 0 = Sunday) appears. + # Week 5 represents the last occurrence of day `d`, so we need to know + # the range of the month. + first_day, days_in_month = calendar.monthrange(year, self.m) + + # This equation seems magical, so I'll break it down: + # 1. calendar says 0 = Monday, POSIX says 0 = Sunday + # so we need first_day + 1 to get 1 = Monday -> 7 = Sunday, + # which is still equivalent because this math is mod 7 + # 2. Get first day - desired day mod 7: -1 % 7 = 6, so we don't need + # to do anything to adjust negative numbers. + # 3. Add 1 because month days are a 1-based index. + month_day = (self.d - (first_day + 1)) % 7 + 1 + + # Now use a 0-based index version of `w` to calculate the w-th + # occurrence of `d` + month_day += (self.w - 1) * 7 + + # month_day will only be > days_in_month if w was 5, and `w` means + # "last occurrence of `d`", so now we just check if we over-shot the + # end of the month and if so knock off 1 week. + if month_day > days_in_month: + month_day -= 7 + + ordinal = self._ymd2ord(year, self.m, month_day) + epoch = ordinal * 86400 + epoch += self.hour * 3600 + self.minute * 60 + self.second + return epoch + + +def _parse_tz_str(tz_str): + # The tz string has the format: + # + # std[offset[dst[offset],start[/time],end[/time]]] + # + # std and dst must be 3 or more characters long and must not contain + # a leading colon, embedded digits, commas, nor a plus or minus signs; + # The spaces between "std" and "offset" are only for display and are + # not actually present in the string. 
+ # + # The format of the offset is ``[+|-]hh[:mm[:ss]]`` + + offset_str, *start_end_str = tz_str.split(",", 1) + + # fmt: off + parser_re = re.compile( + r"(?P<std>[^<0-9:.+-]+|<[a-zA-Z0-9+\-]+>)" + + r"((?P<stdoff>[+-]?\d{1,2}(:\d{2}(:\d{2})?)?)" + + r"((?P<dst>[^0-9:.+-]+|<[a-zA-Z0-9+\-]+>)" + + r"((?P<dstoff>[+-]?\d{1,2}(:\d{2}(:\d{2})?)?))?" + + r")?" + # dst + r")?$" # stdoff + ) + # fmt: on + + m = parser_re.match(offset_str) + + if m is None: + raise ValueError(f"{tz_str} is not a valid TZ string") + + std_abbr = m.group("std") + dst_abbr = m.group("dst") + dst_offset = None + + std_abbr = std_abbr.strip("<>") + + if dst_abbr: + dst_abbr = dst_abbr.strip("<>") + + if std_offset := m.group("stdoff"): + try: + std_offset = _parse_tz_delta(std_offset) + except ValueError as e: + raise ValueError(f"Invalid STD offset in {tz_str}") from e + else: + std_offset = 0 + + if dst_abbr is not None: + if dst_offset := m.group("dstoff"): + try: + dst_offset = _parse_tz_delta(dst_offset) + except ValueError as e: + raise ValueError(f"Invalid DST offset in {tz_str}") from e + else: + dst_offset = std_offset + 3600 + + if not start_end_str: + raise ValueError(f"Missing transition rules: {tz_str}") + + start_end_strs = start_end_str[0].split(",", 1) + try: + start, end = (_parse_dst_start_end(x) for x in start_end_strs) + except ValueError as e: + raise ValueError(f"Invalid TZ string: {tz_str}") from e + + return _TZStr(std_abbr, std_offset, dst_abbr, dst_offset, start, end) + elif start_end_str: + raise ValueError(f"Transition rule present without DST: {tz_str}") + else: + # This is a static ttinfo, don't return _TZStr + return _ttinfo( + _load_timedelta(std_offset), _load_timedelta(0), std_abbr + ) + + +def _parse_dst_start_end(dststr): + date, *time = dststr.split("/") + if date[0] == "M": + n_is_julian = False + m = re.match(r"M(\d{1,2})\.(\d).(\d)$", date) + if m is None: + raise ValueError(f"Invalid dst start/end date: {dststr}") + date_offset = tuple(map(int, m.groups())) + offset = 
_CalendarOffset(*date_offset) + else: + if date[0] == "J": + n_is_julian = True + date = date[1:] + else: + n_is_julian = False + + doy = int(date) + offset = _DayOffset(doy, n_is_julian) + + if time: + time_components = list(map(int, time[0].split(":"))) + n_components = len(time_components) + if n_components < 3: + time_components.extend([0] * (3 - n_components)) + offset.hour, offset.minute, offset.second = time_components + + return offset + + +def _parse_tz_delta(tz_delta): + match = re.match( + r"(?P<sign>[+-])?(?P<h>\d{1,2})(:(?P<m>\d{2})(:(?P<s>\d{2}))?)?", + tz_delta, + ) + # Anything passed to this function should already have hit an equivalent + # regular expression to find the section to parse. + assert match is not None, tz_delta + + h, m, s = ( + int(v) if v is not None else 0 + for v in map(match.group, ("h", "m", "s")) + ) + + total = h * 3600 + m * 60 + s + + if not -86400 < total < 86400: + raise ValueError( + "Offset must be strictly between -24h and +24h:" + tz_delta + ) + + # Yes, +5 maps to an offset of -5h + if match.group("sign") != "-": + total *= -1 + + return total diff --git a/Makefile.pre.in b/Makefile.pre.in index d545a9efb3cd9..dbfd805f1a02f 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -197,6 +197,9 @@ OPENSSL_INCLUDES=@OPENSSL_INCLUDES@ OPENSSL_LIBS=@OPENSSL_LIBS@ OPENSSL_LDFLAGS=@OPENSSL_LDFLAGS@ +# Default zoneinfo.TZPATH. Added here to expose it in sysconfig.get_config_var +TZPATH=@TZPATH@ + # Modes for directories, executables and data files created by the # install process. Default to user-only-writable for all file types. 
DIRMODE= 755 diff --git a/Misc/requirements-test.txt b/Misc/requirements-test.txt new file mode 100644 index 0000000000000..6e46c12e4f9d1 --- /dev/null +++ b/Misc/requirements-test.txt @@ -0,0 +1 @@ +tzdata==2020.1rc0 diff --git a/Modules/Setup b/Modules/Setup index 87e73bac78fae..02cfb67518df7 100644 --- a/Modules/Setup +++ b/Modules/Setup @@ -181,6 +181,7 @@ _symtable symtablemodule.c #_elementtree -I$(srcdir)/Modules/expat -DHAVE_EXPAT_CONFIG_H -DUSE_PYEXPAT_CAPI _elementtree.c # elementtree accelerator #_pickle _pickle.c # pickle accelerator #_datetime _datetimemodule.c # datetime accelerator +#_zoneinfo _zoneinfo.c # zoneinfo accelerator #_bisect _bisectmodule.c # Bisection algorithms #_heapq _heapqmodule.c # Heap queue algorithm #_asyncio _asynciomodule.c # Fast asyncio Future diff --git a/Modules/_zoneinfo.c b/Modules/_zoneinfo.c new file mode 100644 index 0000000000000..9f5e64d8486cc --- /dev/null +++ b/Modules/_zoneinfo.c @@ -0,0 +1,2695 @@ +#include "Python.h" +#include "structmember.h" + +#include <ctype.h> +#include <stddef.h> +#include <stdint.h> + +#include "datetime.h" + +// Imports +static PyObject *io_open = NULL; +static PyObject *_tzpath_find_tzfile = NULL; +static PyObject *_common_mod = NULL; + +typedef struct TransitionRuleType TransitionRuleType; +typedef struct StrongCacheNode StrongCacheNode; + +typedef struct { + PyObject *utcoff; + PyObject *dstoff; + PyObject *tzname; + long utcoff_seconds; +} _ttinfo; + +typedef struct { + _ttinfo std; + _ttinfo dst; + int dst_diff; + TransitionRuleType *start; + TransitionRuleType *end; + unsigned char std_only; +} _tzrule; + +typedef struct { + PyDateTime_TZInfo base; + PyObject *key; + PyObject *file_repr; + PyObject *weakreflist; + unsigned int num_transitions; + unsigned int num_ttinfos; + int64_t *trans_list_utc; + int64_t *trans_list_wall[2]; + _ttinfo **trans_ttinfos; // References to the ttinfo for each transition + _ttinfo *ttinfo_before; + _tzrule tzrule_after; + _ttinfo *_ttinfos; // Unique array of ttinfos for ease of 
deallocation + unsigned char fixed_offset; + unsigned char source; +} PyZoneInfo_ZoneInfo; + +struct TransitionRuleType { + int64_t (*year_to_timestamp)(TransitionRuleType *, int); +}; + +typedef struct { + TransitionRuleType base; + uint8_t month; + uint8_t week; + uint8_t day; + int8_t hour; + int8_t minute; + int8_t second; +} CalendarRule; + +typedef struct { + TransitionRuleType base; + uint8_t julian; + unsigned int day; + int8_t hour; + int8_t minute; + int8_t second; +} DayRule; + +struct StrongCacheNode { + StrongCacheNode *next; + StrongCacheNode *prev; + PyObject *key; + PyObject *zone; +}; + +static PyTypeObject PyZoneInfo_ZoneInfoType; + +// Globals +static PyObject *TIMEDELTA_CACHE = NULL; +static PyObject *ZONEINFO_WEAK_CACHE = NULL; +static StrongCacheNode *ZONEINFO_STRONG_CACHE = NULL; +static size_t ZONEINFO_STRONG_CACHE_MAX_SIZE = 8; + +static _ttinfo NO_TTINFO = {NULL, NULL, NULL, 0}; + +// Constants +static const int EPOCHORDINAL = 719163; +static int DAYS_IN_MONTH[] = { + -1, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31, +}; + +static int DAYS_BEFORE_MONTH[] = { + -1, 0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, +}; + +static const int SOURCE_NOCACHE = 0; +static const int SOURCE_CACHE = 1; +static const int SOURCE_FILE = 2; + +// Forward declarations +static int +load_data(PyZoneInfo_ZoneInfo *self, PyObject *file_obj); +static void +utcoff_to_dstoff(size_t *trans_idx, long *utcoffs, long *dstoffs, + unsigned char *isdsts, size_t num_transitions, + size_t num_ttinfos); +static int +ts_to_local(size_t *trans_idx, int64_t *trans_utc, long *utcoff, + int64_t *trans_local[2], size_t num_ttinfos, + size_t num_transitions); + +static int +parse_tz_str(PyObject *tz_str_obj, _tzrule *out); + +static ssize_t +parse_abbr(const char *const p, PyObject **abbr); +static ssize_t +parse_tz_delta(const char *const p, long *total_seconds); +static ssize_t +parse_transition_time(const char *const p, int8_t *hour, int8_t *minute, + int8_t *second); 
+static ssize_t +parse_transition_rule(const char *const p, TransitionRuleType **out); + +static _ttinfo * +find_tzrule_ttinfo(_tzrule *rule, int64_t ts, unsigned char fold, int year); +static _ttinfo * +find_tzrule_ttinfo_fromutc(_tzrule *rule, int64_t ts, int year, + unsigned char *fold); + +static int +build_ttinfo(long utcoffset, long dstoffset, PyObject *tzname, _ttinfo *out); +static void +xdecref_ttinfo(_ttinfo *ttinfo); +static int +ttinfo_eq(const _ttinfo *const tti0, const _ttinfo *const tti1); + +static int +build_tzrule(PyObject *std_abbr, PyObject *dst_abbr, long std_offset, + long dst_offset, TransitionRuleType *start, + TransitionRuleType *end, _tzrule *out); +static void +free_tzrule(_tzrule *tzrule); + +static PyObject * +load_timedelta(long seconds); + +static int +get_local_timestamp(PyObject *dt, int64_t *local_ts); +static _ttinfo * +find_ttinfo(PyZoneInfo_ZoneInfo *self, PyObject *dt); + +static int +ymd_to_ord(int y, int m, int d); +static int +is_leap_year(int year); + +static size_t +_bisect(const int64_t value, const int64_t *arr, size_t size); + +static void +eject_from_strong_cache(const PyTypeObject *const type, PyObject *key); +static void +clear_strong_cache(const PyTypeObject *const type); +static void +update_strong_cache(const PyTypeObject *const type, PyObject *key, + PyObject *zone); +static PyObject * +zone_from_strong_cache(const PyTypeObject *const type, PyObject *key); + +static PyObject * +zoneinfo_new_instance(PyTypeObject *type, PyObject *key) +{ + PyObject *file_obj = NULL; + PyObject *file_path = NULL; + + file_path = PyObject_CallFunctionObjArgs(_tzpath_find_tzfile, key, NULL); + if (file_path == NULL) { + return NULL; + } + else if (file_path == Py_None) { + file_obj = PyObject_CallMethod(_common_mod, "load_tzdata", "O", key); + if (file_obj == NULL) { + Py_DECREF(file_path); + return NULL; + } + } + + PyObject *self = (PyObject *)(type->tp_alloc(type, 0)); + if (self == NULL) { + goto error; + } + + if (file_obj == 
NULL) { + file_obj = PyObject_CallFunction(io_open, "Os", file_path, "rb"); + if (file_obj == NULL) { + goto error; + } + } + + if (load_data((PyZoneInfo_ZoneInfo *)self, file_obj)) { + goto error; + } + + PyObject *rv = PyObject_CallMethod(file_obj, "close", NULL); + Py_DECREF(file_obj); + file_obj = NULL; + if (rv == NULL) { + goto error; + } + Py_DECREF(rv); + + ((PyZoneInfo_ZoneInfo *)self)->key = key; + Py_INCREF(key); + + goto cleanup; +error: + Py_XDECREF(self); + self = NULL; +cleanup: + if (file_obj != NULL) { + PyObject *tmp = PyObject_CallMethod(file_obj, "close", NULL); + Py_DECREF(tmp); + Py_DECREF(file_obj); + } + Py_DECREF(file_path); + return self; +} + +static PyObject * +get_weak_cache(PyTypeObject *type) +{ + if (type == &PyZoneInfo_ZoneInfoType) { + return ZONEINFO_WEAK_CACHE; + } + else { + PyObject *cache = + PyObject_GetAttrString((PyObject *)type, "_weak_cache"); + // We are assuming that the type lives at least as long as the function + // that calls get_weak_cache, and that it holds a reference to the + // cache, so we'll return a "borrowed reference". 
+ Py_XDECREF(cache); + return cache; + } +} + +static PyObject * +zoneinfo_new(PyTypeObject *type, PyObject *args, PyObject *kw) +{ + PyObject *key = NULL; + static char *kwlist[] = {"key", NULL}; + if (PyArg_ParseTupleAndKeywords(args, kw, "O", kwlist, &key) == 0) { + return NULL; + } + + PyObject *instance = zone_from_strong_cache(type, key); + if (instance != NULL) { + return instance; + } + + PyObject *weak_cache = get_weak_cache(type); + instance = PyObject_CallMethod(weak_cache, "get", "O", key, Py_None); + if (instance == NULL) { + return NULL; + } + + if (instance == Py_None) { + Py_DECREF(instance); + PyObject *tmp = zoneinfo_new_instance(type, key); + if (tmp == NULL) { + return NULL; + } + + instance = + PyObject_CallMethod(weak_cache, "setdefault", "OO", key, tmp); + ((PyZoneInfo_ZoneInfo *)instance)->source = SOURCE_CACHE; + + Py_DECREF(tmp); + + if (instance == NULL) { + return NULL; + } + } + + update_strong_cache(type, key, instance); + return instance; +} + +static void +zoneinfo_dealloc(PyObject *obj_self) +{ + PyZoneInfo_ZoneInfo *self = (PyZoneInfo_ZoneInfo *)obj_self; + + if (self->weakreflist != NULL) { + PyObject_ClearWeakRefs(obj_self); + } + + if (self->trans_list_utc != NULL) { + PyMem_Free(self->trans_list_utc); + } + + for (size_t i = 0; i < 2; i++) { + if (self->trans_list_wall[i] != NULL) { + PyMem_Free(self->trans_list_wall[i]); + } + } + + if (self->_ttinfos != NULL) { + for (size_t i = 0; i < self->num_ttinfos; ++i) { + xdecref_ttinfo(&(self->_ttinfos[i])); + } + PyMem_Free(self->_ttinfos); + } + + if (self->trans_ttinfos != NULL) { + PyMem_Free(self->trans_ttinfos); + } + + free_tzrule(&(self->tzrule_after)); + + Py_XDECREF(self->key); + Py_XDECREF(self->file_repr); + + Py_TYPE(self)->tp_free((PyObject *)self); +} + +static PyObject * +zoneinfo_from_file(PyTypeObject *type, PyObject *args, PyObject *kwargs) +{ + PyObject *file_obj = NULL; + PyObject *file_repr = NULL; + PyObject *key = Py_None; + PyZoneInfo_ZoneInfo *self = NULL; + 
+ static char *kwlist[] = {"", "key", NULL}; + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O|O", kwlist, &file_obj, + &key)) { + return NULL; + } + + PyObject *obj_self = (PyObject *)(type->tp_alloc(type, 0)); + self = (PyZoneInfo_ZoneInfo *)obj_self; + if (self == NULL) { + return NULL; + } + + file_repr = PyUnicode_FromFormat("%R", file_obj); + if (file_repr == NULL) { + goto error; + } + + if (load_data(self, file_obj)) { + goto error; + } + + self->source = SOURCE_FILE; + self->file_repr = file_repr; + self->key = key; + Py_INCREF(key); + + return obj_self; +error: + Py_XDECREF(file_repr); + Py_XDECREF(self); + return NULL; +} + +static PyObject * +zoneinfo_no_cache(PyTypeObject *cls, PyObject *args, PyObject *kwargs) +{ + static char *kwlist[] = {"key", NULL}; + PyObject *key = NULL; + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O", kwlist, &key)) { + return NULL; + } + + PyObject *out = zoneinfo_new_instance(cls, key); + if (out != NULL) { + ((PyZoneInfo_ZoneInfo *)out)->source = SOURCE_NOCACHE; + } + + return out; +} + +static PyObject * +zoneinfo_clear_cache(PyObject *cls, PyObject *args, PyObject *kwargs) +{ + PyObject *only_keys = NULL; + static char *kwlist[] = {"only_keys", NULL}; + + if (!(PyArg_ParseTupleAndKeywords(args, kwargs, "|$O", kwlist, + &only_keys))) { + return NULL; + } + + PyTypeObject *type = (PyTypeObject *)cls; + PyObject *weak_cache = get_weak_cache(type); + + if (only_keys == NULL || only_keys == Py_None) { + PyObject *rv = PyObject_CallMethod(weak_cache, "clear", NULL); + if (rv != NULL) { + Py_DECREF(rv); + } + + clear_strong_cache(type); + ZONEINFO_STRONG_CACHE = NULL; + } + else { + PyObject *item = NULL; + PyObject *pop = PyUnicode_FromString("pop"); + if (pop == NULL) { + return NULL; + } + + PyObject *iter = PyObject_GetIter(only_keys); + if (iter == NULL) { + Py_DECREF(pop); + return NULL; + } + + while ((item = PyIter_Next(iter))) { + // Remove from strong cache + eject_from_strong_cache(type, item); + + // Remove 
from weak cache + PyObject *tmp = PyObject_CallMethodObjArgs(weak_cache, pop, item, + Py_None, NULL); + + Py_DECREF(item); + if (tmp == NULL) { + break; + } + Py_DECREF(tmp); + } + Py_DECREF(iter); + Py_DECREF(pop); + } + + if (PyErr_Occurred()) { + return NULL; + } + + Py_RETURN_NONE; +} + +static PyObject * +zoneinfo_utcoffset(PyObject *self, PyObject *dt) +{ + _ttinfo *tti = find_ttinfo((PyZoneInfo_ZoneInfo *)self, dt); + if (tti == NULL) { + return NULL; + } + Py_INCREF(tti->utcoff); + return tti->utcoff; +} + +static PyObject * +zoneinfo_dst(PyObject *self, PyObject *dt) +{ + _ttinfo *tti = find_ttinfo((PyZoneInfo_ZoneInfo *)self, dt); + if (tti == NULL) { + return NULL; + } + Py_INCREF(tti->dstoff); + return tti->dstoff; +} + +static PyObject * +zoneinfo_tzname(PyObject *self, PyObject *dt) +{ + _ttinfo *tti = find_ttinfo((PyZoneInfo_ZoneInfo *)self, dt); + if (tti == NULL) { + return NULL; + } + Py_INCREF(tti->tzname); + return tti->tzname; +} + +#define HASTZINFO(p) (((_PyDateTime_BaseTZInfo *)(p))->hastzinfo) +#define GET_DT_TZINFO(p) \ + (HASTZINFO(p) ? 
((PyDateTime_DateTime *)(p))->tzinfo : Py_None) + +static PyObject * +zoneinfo_fromutc(PyObject *obj_self, PyObject *dt) +{ + if (!PyDateTime_Check(dt)) { + PyErr_SetString(PyExc_TypeError, + "fromutc: argument must be a datetime"); + return NULL; + } + if (GET_DT_TZINFO(dt) != obj_self) { + PyErr_SetString(PyExc_ValueError, + "fromutc: dt.tzinfo " + "is not self"); + return NULL; + } + + PyZoneInfo_ZoneInfo *self = (PyZoneInfo_ZoneInfo *)obj_self; + + int64_t timestamp; + if (get_local_timestamp(dt, &timestamp)) { + return NULL; + } + size_t num_trans = self->num_transitions; + + _ttinfo *tti = NULL; + unsigned char fold = 0; + + if (num_trans >= 1 && timestamp < self->trans_list_utc[0]) { + tti = self->ttinfo_before; + } + else if (num_trans == 0 || + timestamp > self->trans_list_utc[num_trans - 1]) { + tti = find_tzrule_ttinfo_fromutc(&(self->tzrule_after), timestamp, + PyDateTime_GET_YEAR(dt), &fold); + + // Immediately after the last manual transition, the fold/gap is + // between self->trans_ttinfos[num_transitions - 1] and whatever + // ttinfo applies immediately after the last transition, not between + // the STD and DST rules in the tzrule_after, so we may need to + // adjust the fold value. 
+ if (num_trans) { + _ttinfo *tti_prev = NULL; + if (num_trans == 1) { + tti_prev = self->ttinfo_before; + } + else { + tti_prev = self->trans_ttinfos[num_trans - 2]; + } + int64_t diff = tti_prev->utcoff_seconds - tti->utcoff_seconds; + if (diff > 0 && + timestamp < (self->trans_list_utc[num_trans - 1] + diff)) { + fold = 1; + } + } + } + else { + size_t idx = _bisect(timestamp, self->trans_list_utc, num_trans); + _ttinfo *tti_prev = NULL; + + if (idx >= 2) { + tti_prev = self->trans_ttinfos[idx - 2]; + tti = self->trans_ttinfos[idx - 1]; + } + else { + tti_prev = self->ttinfo_before; + tti = self->trans_ttinfos[0]; + } + + // Detect fold + int64_t shift = + (int64_t)(tti_prev->utcoff_seconds - tti->utcoff_seconds); + if (shift > (timestamp - self->trans_list_utc[idx - 1])) { + fold = 1; + } + } + + PyObject *tmp = PyNumber_Add(dt, tti->utcoff); + if (tmp == NULL) { + return NULL; + } + + if (fold) { + if (PyDateTime_CheckExact(tmp)) { + ((PyDateTime_DateTime *)tmp)->fold = 1; + dt = tmp; + } + else { + PyObject *replace = PyObject_GetAttrString(tmp, "replace"); + PyObject *args = PyTuple_New(0); + PyObject *kwargs = PyDict_New(); + + Py_DECREF(tmp); + if (args == NULL || kwargs == NULL || replace == NULL) { + Py_XDECREF(args); + Py_XDECREF(kwargs); + Py_XDECREF(replace); + return NULL; + } + + dt = NULL; + if (!PyDict_SetItemString(kwargs, "fold", _PyLong_One)) { + dt = PyObject_Call(replace, args, kwargs); + } + + Py_DECREF(args); + Py_DECREF(kwargs); + Py_DECREF(replace); + + if (dt == NULL) { + return NULL; + } + } + } + else { + dt = tmp; + } + return dt; +} + +static PyObject * +zoneinfo_repr(PyZoneInfo_ZoneInfo *self) +{ + PyObject *rv = NULL; + const char *type_name = Py_TYPE((PyObject *)self)->tp_name; + if (!(self->key == Py_None)) { + rv = PyUnicode_FromFormat("%s(key=%R)", type_name, self->key); + } + else { + assert(PyUnicode_Check(self->file_repr)); + rv = PyUnicode_FromFormat("%s.from_file(%U)", type_name, + self->file_repr); + } + + return rv; +} + 
+static PyObject * +zoneinfo_str(PyZoneInfo_ZoneInfo *self) +{ + if (!(self->key == Py_None)) { + Py_INCREF(self->key); + return self->key; + } + else { + return zoneinfo_repr(self); + } +} + +/* Pickles the ZoneInfo object by key and source. + * + * ZoneInfo objects are pickled by reference to the TZif file that they came + * from, which means that the exact transitions may be different or the file + * may not un-pickle if the data has changed on disk in the interim. + * + * It is necessary to include a bit indicating whether or not the object + * was constructed from the cache, because from-cache objects will hit the + * unpickling process's cache, whereas no-cache objects will bypass it. + * + * Objects constructed from ZoneInfo.from_file cannot be pickled. + */ +static PyObject * +zoneinfo_reduce(PyObject *obj_self, PyObject *unused) +{ + PyZoneInfo_ZoneInfo *self = (PyZoneInfo_ZoneInfo *)obj_self; + if (self->source == SOURCE_FILE) { + // Objects constructed from files cannot be pickled. + PyObject *pickle = PyImport_ImportModule("pickle"); + if (pickle == NULL) { + return NULL; + } + + PyObject *pickle_error = + PyObject_GetAttrString(pickle, "PicklingError"); + Py_DECREF(pickle); + if (pickle_error == NULL) { + return NULL; + } + + PyErr_Format(pickle_error, + "Cannot pickle a ZoneInfo file from a file stream."); + Py_DECREF(pickle_error); + return NULL; + } + + unsigned char from_cache = self->source == SOURCE_CACHE ? 
1 : 0; + PyObject *constructor = PyObject_GetAttrString(obj_self, "_unpickle"); + + if (constructor == NULL) { + return NULL; + } + + PyObject *rv = Py_BuildValue("O(OB)", constructor, self->key, from_cache); + Py_DECREF(constructor); + return rv; +} + +static PyObject * +zoneinfo__unpickle(PyTypeObject *cls, PyObject *args) +{ + PyObject *key; + unsigned char from_cache; + if (!PyArg_ParseTuple(args, "OB", &key, &from_cache)) { + return NULL; + } + + if (from_cache) { + PyObject *val_args = Py_BuildValue("(O)", key); + if (val_args == NULL) { + return NULL; + } + + PyObject *rv = zoneinfo_new(cls, val_args, NULL); + + Py_DECREF(val_args); + return rv; + } + else { + return zoneinfo_new_instance(cls, key); + } +} + +/* It is relatively expensive to construct new timedelta objects, and in most + * cases we're looking at a relatively small number of timedeltas, such as + * integer number of hours, etc. We will keep a cache so that we construct + * a minimal number of these. + * + * Possibly this should be replaced with an LRU cache so that it's not possible + * for the memory usage to explode from this, but in order for this to be a + * serious problem, one would need to deliberately craft a malicious time zone + * file with many distinct offsets. As of tzdb 2019c, loading every single zone + * fills the cache with ~450 timedeltas for a total size of ~12kB. + * + * This returns a new reference to the timedelta. 
+ */ +static PyObject * +load_timedelta(long seconds) +{ + PyObject *rv = NULL; + PyObject *pyoffset = PyLong_FromLong(seconds); + if (pyoffset == NULL) { + return NULL; + } + int contains = PyDict_Contains(TIMEDELTA_CACHE, pyoffset); + if (contains == -1) { + goto error; + } + + if (!contains) { + PyObject *tmp = PyDateTimeAPI->Delta_FromDelta( + 0, seconds, 0, 1, PyDateTimeAPI->DeltaType); + + if (tmp == NULL) { + goto error; + } + + rv = PyDict_SetDefault(TIMEDELTA_CACHE, pyoffset, tmp); + Py_DECREF(tmp); + } + else { + rv = PyDict_GetItem(TIMEDELTA_CACHE, pyoffset); + } + + Py_DECREF(pyoffset); + Py_INCREF(rv); + return rv; +error: + Py_DECREF(pyoffset); + return NULL; +} + +/* Constructor for _ttinfo object - this starts by initializing the _ttinfo + * to { NULL, NULL, NULL }, so that Py_XDECREF will work on partially + * initialized _ttinfo objects. + */ +static int +build_ttinfo(long utcoffset, long dstoffset, PyObject *tzname, _ttinfo *out) +{ + out->utcoff = NULL; + out->dstoff = NULL; + out->tzname = NULL; + + out->utcoff_seconds = utcoffset; + out->utcoff = load_timedelta(utcoffset); + if (out->utcoff == NULL) { + return -1; + } + + out->dstoff = load_timedelta(dstoffset); + if (out->dstoff == NULL) { + return -1; + } + + out->tzname = tzname; + Py_INCREF(tzname); + + return 0; +} + +/* Decrease reference count on any non-NULL members of a _ttinfo */ +static void +xdecref_ttinfo(_ttinfo *ttinfo) +{ + if (ttinfo != NULL) { + Py_XDECREF(ttinfo->utcoff); + Py_XDECREF(ttinfo->dstoff); + Py_XDECREF(ttinfo->tzname); + } +} + +/* Equality function for _ttinfo. 
*/ +static int +ttinfo_eq(const _ttinfo *const tti0, const _ttinfo *const tti1) +{ + int rv; + if ((rv = PyObject_RichCompareBool(tti0->utcoff, tti1->utcoff, Py_EQ)) < + 1) { + goto end; + } + + if ((rv = PyObject_RichCompareBool(tti0->dstoff, tti1->dstoff, Py_EQ)) < + 1) { + goto end; + } + + if ((rv = PyObject_RichCompareBool(tti0->tzname, tti1->tzname, Py_EQ)) < + 1) { + goto end; + } +end: + return rv; +} + +/* Given a file-like object, this populates a ZoneInfo object + * + * The current version calls into a Python function to read the data from + * file into Python objects, and this translates those Python objects into + * C values and calculates derived values (e.g. dstoff) in C. + * + * This returns 0 on success and -1 on failure. + * + * The function will never return while `self` is partially initialized — + * the object only needs to be freed / deallocated if this succeeds. + */ +static int +load_data(PyZoneInfo_ZoneInfo *self, PyObject *file_obj) +{ + PyObject *data_tuple = NULL; + + long *utcoff = NULL; + long *dstoff = NULL; + size_t *trans_idx = NULL; + unsigned char *isdst = NULL; + + self->trans_list_utc = NULL; + self->trans_list_wall[0] = NULL; + self->trans_list_wall[1] = NULL; + self->trans_ttinfos = NULL; + self->_ttinfos = NULL; + self->file_repr = NULL; + + size_t ttinfos_allocated = 0; + + data_tuple = PyObject_CallMethod(_common_mod, "load_data", "O", file_obj); + + if (data_tuple == NULL) { + goto error; + } + + if (!PyTuple_CheckExact(data_tuple)) { + PyErr_Format(PyExc_TypeError, "Invalid data result type: %r", + data_tuple); + goto error; + } + + // Unpack the data tuple + PyObject *trans_idx_list = PyTuple_GetItem(data_tuple, 0); + if (trans_idx_list == NULL) { + goto error; + } + + PyObject *trans_utc = PyTuple_GetItem(data_tuple, 1); + if (trans_utc == NULL) { + goto error; + } + + PyObject *utcoff_list = PyTuple_GetItem(data_tuple, 2); + if (utcoff_list == NULL) { + goto error; + } + + PyObject *isdst_list = 
PyTuple_GetItem(data_tuple, 3); + if (isdst_list == NULL) { + goto error; + } + + PyObject *abbr = PyTuple_GetItem(data_tuple, 4); + if (abbr == NULL) { + goto error; + } + + PyObject *tz_str = PyTuple_GetItem(data_tuple, 5); + if (tz_str == NULL) { + goto error; + } + + // Load the relevant sizes + Py_ssize_t num_transitions = PyTuple_Size(trans_utc); + if (num_transitions == -1) { + goto error; + } + + Py_ssize_t num_ttinfos = PyTuple_Size(utcoff_list); + if (num_ttinfos == -1) { + goto error; + } + + self->num_transitions = (size_t)num_transitions; + self->num_ttinfos = (size_t)num_ttinfos; + + // Load the transition indices and list + self->trans_list_utc = + PyMem_Malloc(self->num_transitions * sizeof(int64_t)); + trans_idx = PyMem_Malloc(self->num_transitions * sizeof(Py_ssize_t)); + + for (Py_ssize_t i = 0; i < self->num_transitions; ++i) { + PyObject *num = PyTuple_GetItem(trans_utc, i); + if (num == NULL) { + goto error; + } + self->trans_list_utc[i] = PyLong_AsLongLong(num); + if (self->trans_list_utc[i] == -1 && PyErr_Occurred()) { + goto error; + } + + num = PyTuple_GetItem(trans_idx_list, i); + if (num == NULL) { + goto error; + } + + Py_ssize_t cur_trans_idx = PyLong_AsSsize_t(num); + if (cur_trans_idx == -1) { + goto error; + } + + trans_idx[i] = (size_t)cur_trans_idx; + if (trans_idx[i] > self->num_ttinfos) { + PyErr_Format( + PyExc_ValueError, + "Invalid transition index found while reading TZif: %zd", + cur_trans_idx); + + goto error; + } + } + + // Load UTC offsets and isdst (size num_ttinfos) + utcoff = PyMem_Malloc(self->num_ttinfos * sizeof(long)); + isdst = PyMem_Malloc(self->num_ttinfos * sizeof(unsigned char)); + + if (utcoff == NULL || isdst == NULL) { + goto error; + } + for (Py_ssize_t i = 0; i < self->num_ttinfos; ++i) { + PyObject *num = PyTuple_GetItem(utcoff_list, i); + if (num == NULL) { + goto error; + } + + utcoff[i] = PyLong_AsLong(num); + if (utcoff[i] == -1 && PyErr_Occurred()) { + goto error; + } + + num = 
PyTuple_GetItem(isdst_list, i); + if (num == NULL) { + goto error; + } + + int isdst_with_error = PyObject_IsTrue(num); + if (isdst_with_error == -1) { + goto error; + } + else { + isdst[i] = (unsigned char)isdst_with_error; + } + } + + dstoff = PyMem_Calloc(self->num_ttinfos, sizeof(long)); + if (dstoff == NULL) { + goto error; + } + + // Derive dstoff and trans_list_wall from the information we've loaded + utcoff_to_dstoff(trans_idx, utcoff, dstoff, isdst, self->num_transitions, + self->num_ttinfos); + + if (ts_to_local(trans_idx, self->trans_list_utc, utcoff, + self->trans_list_wall, self->num_ttinfos, + self->num_transitions)) { + goto error; + } + + // Build _ttinfo objects from utcoff, dstoff and abbr + self->_ttinfos = PyMem_Malloc(self->num_ttinfos * sizeof(_ttinfo)); + for (size_t i = 0; i < self->num_ttinfos; ++i) { + PyObject *tzname = PyTuple_GetItem(abbr, i); + if (tzname == NULL) { + goto error; + } + + ttinfos_allocated++; + if (build_ttinfo(utcoff[i], dstoff[i], tzname, &(self->_ttinfos[i]))) { + goto error; + } + } + + // Build our mapping from transition to the ttinfo that applies + self->trans_ttinfos = + PyMem_Calloc(self->num_transitions, sizeof(_ttinfo *)); + for (size_t i = 0; i < self->num_transitions; ++i) { + size_t ttinfo_idx = trans_idx[i]; + assert(ttinfo_idx < self->num_ttinfos); + self->trans_ttinfos[i] = &(self->_ttinfos[ttinfo_idx]); + } + + // Set ttinfo_before to the first non-DST transition + for (size_t i = 0; i < self->num_ttinfos; ++i) { + if (!isdst[i]) { + self->ttinfo_before = &(self->_ttinfos[i]); + break; + } + } + + // If there are only DST ttinfos, pick the first one, if there are no + // ttinfos at all, set ttinfo_before to NULL + if (self->ttinfo_before == NULL && self->num_ttinfos > 0) { + self->ttinfo_before = &(self->_ttinfos[0]); + } + + if (tz_str != Py_None && PyObject_IsTrue(tz_str)) { + if (parse_tz_str(tz_str, &(self->tzrule_after))) { + goto error; + } + } + else { + if (!self->num_ttinfos) { + 
PyErr_Format(PyExc_ValueError, "No time zone information found."); + goto error; + } + + size_t idx; + if (!self->num_transitions) { + idx = self->num_ttinfos - 1; + } + else { + idx = trans_idx[self->num_transitions - 1]; + } + + _ttinfo *tti = &(self->_ttinfos[idx]); + build_tzrule(tti->tzname, NULL, tti->utcoff_seconds, 0, NULL, NULL, + &(self->tzrule_after)); + + // We've abused the build_tzrule constructor to construct an STD-only + // rule mimicking whatever ttinfo we've picked up, but it's possible + // that the one we've picked up is a DST zone, so we need to make sure + // that the dstoff is set correctly in that case. + if (PyObject_IsTrue(tti->dstoff)) { + _ttinfo *tti_after = &(self->tzrule_after.std); + Py_DECREF(tti_after->dstoff); + tti_after->dstoff = tti->dstoff; + Py_INCREF(tti_after->dstoff); + } + } + + // Determine if this is a "fixed offset" zone, meaning that the output of + // the utcoffset, dst and tzname functions does not depend on the specific + // datetime passed. + // + // We make three simplifying assumptions here: + // + // 1. If tzrule_after is not std_only, it has transitions that might occur + // (it is possible to construct TZ strings that specify STD and DST but + // no transitions ever occur, such as AAA0BBB,0/0,J365/25). + // 2. If self->_ttinfos contains more than one _ttinfo object, the objects + // represent different offsets. + // 3. self->ttinfos contains no unused _ttinfos (in which case an otherwise + // fixed-offset zone with extra _ttinfos defined may appear to *not* be + // a fixed offset zone). + // + // Violations to these assumptions would be fairly exotic, and exotic + // zones should almost certainly not be used with datetime.time (the + // only thing that would be affected by this). 
+ if (self->num_ttinfos > 1 || !self->tzrule_after.std_only) { + self->fixed_offset = 0; + } + else if (self->num_ttinfos == 0) { + self->fixed_offset = 1; + } + else { + int constant_offset = + ttinfo_eq(&(self->_ttinfos[0]), &self->tzrule_after.std); + if (constant_offset < 0) { + goto error; + } + else { + self->fixed_offset = constant_offset; + } + } + + int rv = 0; + goto cleanup; +error: + // These resources only need to be freed if we have failed, if we succeed + // in initializing a PyZoneInfo_ZoneInfo object, we can rely on its dealloc + // method to free the relevant resources. + if (self->trans_list_utc != NULL) { + PyMem_Free(self->trans_list_utc); + self->trans_list_utc = NULL; + } + + for (size_t i = 0; i < 2; ++i) { + if (self->trans_list_wall[i] != NULL) { + PyMem_Free(self->trans_list_wall[i]); + self->trans_list_wall[i] = NULL; + } + } + + if (self->_ttinfos != NULL) { + for (size_t i = 0; i < ttinfos_allocated; ++i) { + xdecref_ttinfo(&(self->_ttinfos[i])); + } + PyMem_Free(self->_ttinfos); + self->_ttinfos = NULL; + } + + if (self->trans_ttinfos != NULL) { + PyMem_Free(self->trans_ttinfos); + self->trans_ttinfos = NULL; + } + + rv = -1; +cleanup: + Py_XDECREF(data_tuple); + + if (utcoff != NULL) { + PyMem_Free(utcoff); + } + + if (dstoff != NULL) { + PyMem_Free(dstoff); + } + + if (isdst != NULL) { + PyMem_Free(isdst); + } + + if (trans_idx != NULL) { + PyMem_Free(trans_idx); + } + + return rv; +} + +/* Function to calculate the local timestamp of a transition from the year. */ +int64_t +calendarrule_year_to_timestamp(TransitionRuleType *base_self, int year) +{ + CalendarRule *self = (CalendarRule *)base_self; + + // We want (year, month, day of month); we have year and month, but we + // need to turn (week, day-of-week) into day-of-month + // + // Week 1 is the first week in which day `day` (where 0 = Sunday) appears. 
+ // Week 5 represents the last occurrence of day `day`, so we need to know + // the first weekday of the month and the number of days in the month. + int8_t first_day = (ymd_to_ord(year, self->month, 1) + 6) % 7; + uint8_t days_in_month = DAYS_IN_MONTH[self->month]; + if (self->month == 2 && is_leap_year(year)) { + days_in_month += 1; + } + + // This equation seems magical, so I'll break it down: + // 1. calendar says 0 = Monday, POSIX says 0 = Sunday so we need first_day + // + 1 to get 1 = Monday -> 7 = Sunday, which is still equivalent + // because this math is mod 7 + // 2. Get first day - desired day mod 7 (adjusting by 7 for negative + // numbers so that -1 % 7 = 6). + // 3. Add 1 because month days are a 1-based index. + int8_t month_day = ((int8_t)(self->day) - (first_day + 1)) % 7; + if (month_day < 0) { + month_day += 7; + } + month_day += 1; + + // Now use a 0-based index version of `week` to calculate the w-th + // occurrence of `day` + month_day += ((int8_t)(self->week) - 1) * 7; + + // month_day will only be > days_in_month if w was 5, and `w` means "last + // occurrence of `d`", so now we just check if we over-shot the end of the + // month and if so knock off 1 week. + if (month_day > days_in_month) { + month_day -= 7; + } + + int64_t ordinal = ymd_to_ord(year, self->month, month_day) - EPOCHORDINAL; + return ((ordinal * 86400) + (int64_t)(self->hour * 3600) + + (int64_t)(self->minute * 60) + (int64_t)(self->second)); +} + +/* Constructor for CalendarRule. */ +int +calendarrule_new(uint8_t month, uint8_t week, uint8_t day, int8_t hour, + int8_t minute, int8_t second, CalendarRule *out) +{ + // These bounds come from the POSIX standard, which describes an Mm.n.d + // rule as: + // + // The d'th day (0 <= d <= 6) of week n of month m of the year (1 <= n <= + // 5, 1 <= m <= 12, where week 5 means "the last d day in month m" which + // may occur in either the fourth or the fifth week). Week 1 is the first + // week in which the d'th day occurs. 
Day zero is Sunday. + if (month <= 0 || month > 12) { + PyErr_Format(PyExc_ValueError, "Month must be in (0, 12]"); + return -1; + } + + if (week <= 0 || week > 5) { + PyErr_Format(PyExc_ValueError, "Week must be in (0, 5]"); + return -1; + } + + // day is an unsigned integer, so day < 0 should always return false, but + // if day's type changes to a signed integer *without* changing this value, + // it may create a bug. Considering that the compiler should be able to + // optimize out the first comparison if day is an unsigned integer anyway, + // we will leave this comparison in place and disable the compiler warning. +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wtype-limits" + if (day < 0 || day > 6) { +#pragma GCC diagnostic pop + PyErr_Format(PyExc_ValueError, "Day must be in [0, 6]"); + return -1; + } + + TransitionRuleType base = {&calendarrule_year_to_timestamp}; + + CalendarRule new_offset = { + .base = base, + .month = month, + .week = week, + .day = day, + .hour = hour, + .minute = minute, + .second = second, + }; + + *out = new_offset; + return 0; +} + +/* Function to calculate the local timestamp of a transition from the year. + * + * This translates the day of the year into a local timestamp ? either a + * 1-based Julian day, not including leap days, or the 0-based year-day, + * including leap days. + * */ +int64_t +dayrule_year_to_timestamp(TransitionRuleType *base_self, int year) +{ + // The function signature requires a TransitionRuleType pointer, but this + // function is only applicable to DayRule* objects. + DayRule *self = (DayRule *)base_self; + + // ymd_to_ord calculates the number of days since 0001-01-01, but we want + // to know the number of days since 1970-01-01, so we must subtract off + // the equivalent of ymd_to_ord(1970, 1, 1). 
+ // + // We subtract off an additional 1 day to account for January 1st (we want + // the number of full days *before* the date of the transition - partial + // days are accounted for in the hour, minute and second portions. + int64_t days_before_year = ymd_to_ord(year, 1, 1) - EPOCHORDINAL - 1; + + // The Julian day specification skips over February 29th in leap years, + // from the POSIX standard: + // + // Leap days shall not be counted. That is, in all years-including leap + // years-February 28 is day 59 and March 1 is day 60. It is impossible to + // refer explicitly to the occasional February 29. + // + // This is actually more useful than you'd think ? if you want a rule that + // always transitions on a given calendar day (other than February 29th), + // you would use a Julian day, e.g. J91 always refers to April 1st and J365 + // always refers to December 31st. + unsigned int day = self->day; + if (self->julian && day >= 59 && is_leap_year(year)) { + day += 1; + } + + return ((days_before_year + day) * 86400) + (self->hour * 3600) + + (self->minute * 60) + self->second; +} + +/* Constructor for DayRule. */ +static int +dayrule_new(uint8_t julian, unsigned int day, int8_t hour, int8_t minute, + int8_t second, DayRule *out) +{ + // The POSIX standard specifies that Julian days must be in the range (1 <= + // n <= 365) and that non-Julian (they call it "0-based Julian") days must + // be in the range (0 <= n <= 365). + if (day < julian || day > 365) { + PyErr_Format(PyExc_ValueError, "day must be in [%u, 365], not: %u", + julian, day); + return -1; + } + + TransitionRuleType base = { + &dayrule_year_to_timestamp, + }; + + DayRule tmp = { + .base = base, + .julian = julian, + .day = day, + .hour = hour, + .minute = minute, + .second = second, + }; + + *out = tmp; + + return 0; +} + +/* Calculate the start and end rules for a _tzrule in the given year. 
*/ +static void +tzrule_transitions(_tzrule *rule, int year, int64_t *start, int64_t *end) +{ + assert(rule->start != NULL); + assert(rule->end != NULL); + *start = rule->start->year_to_timestamp(rule->start, year); + *end = rule->end->year_to_timestamp(rule->end, year); +} + +/* Calculate the _ttinfo that applies at a given local time from a _tzrule. + * + * This takes a local timestamp and fold for disambiguation purposes; the year + * could technically be calculated from the timestamp, but given that the + * callers of this function already have the year information accessible from + * the datetime struct, it is taken as an additional parameter to reduce + * unnecessary calculation. + * */ +static _ttinfo * +find_tzrule_ttinfo(_tzrule *rule, int64_t ts, unsigned char fold, int year) +{ + if (rule->std_only) { + return &(rule->std); + } + + int64_t start, end; + uint8_t isdst; + + tzrule_transitions(rule, year, &start, &end); + + // With fold = 0, the period (denominated in local time) with the smaller + // offset starts at the end of the gap and ends at the end of the fold; + // with fold = 1, it runs from the start of the gap to the beginning of the + // fold. + // + // So in order to determine the DST boundaries we need to know both the + // fold and whether DST is positive or negative (rare), and it turns out + // that this boils down to fold XOR is_positive. + if (fold == (rule->dst_diff >= 0)) { + end -= rule->dst_diff; + } + else { + start += rule->dst_diff; + } + + if (start < end) { + isdst = (ts >= start) && (ts < end); + } + else { + isdst = (ts < end) || (ts >= start); + } + + if (isdst) { + return &(rule->dst); + } + else { + return &(rule->std); + } +} + +/* Calculate the ttinfo and fold that applies for a _tzrule at an epoch time. + * + * This function can determine the _ttinfo that applies at a given epoch time, + * (analogous to trans_list_utc), and whether or not the datetime is in a fold. + * This is to be used in the .fromutc() function. 
+ * + * The year is technically a redundant parameter, because it can be calculated + * from the timestamp, but all callers of this function should have the year + * in the datetime struct anyway, so taking it as a parameter saves unnecessary + * calculation. + **/ +static _ttinfo * +find_tzrule_ttinfo_fromutc(_tzrule *rule, int64_t ts, int year, + unsigned char *fold) +{ + if (rule->std_only) { + *fold = 0; + return &(rule->std); + } + + int64_t start, end; + uint8_t isdst; + tzrule_transitions(rule, year, &start, &end); + start -= rule->std.utcoff_seconds; + end -= rule->dst.utcoff_seconds; + + if (start < end) { + isdst = (ts >= start) && (ts < end); + } + else { + isdst = (ts < end) || (ts >= start); + } + + // For positive DST, the ambiguous period is one dst_diff after the end of + // DST; for negative DST, the ambiguous period is one dst_diff before the + // start of DST. + int64_t ambig_start, ambig_end; + if (rule->dst_diff > 0) { + ambig_start = end; + ambig_end = end + rule->dst_diff; + } + else { + ambig_start = start; + ambig_end = start - rule->dst_diff; + } + + *fold = (ts >= ambig_start) && (ts < ambig_end); + + if (isdst) { + return &(rule->dst); + } + else { + return &(rule->std); + } +} + +/* Parse a TZ string in the format specified by the POSIX standard: + * + * std offset[dst[offset],start[/time],end[/time]] + * + * std and dst must be 3 or more characters long and must not contain a + * leading colon, embedded digits, commas, nor a plus or minus signs; The + * spaces between "std" and "offset" are only for display and are not actually + * present in the string. 
+ * + * The format of the offset is ``[+|-]hh[:mm[:ss]]`` + * + * See the POSIX.1 spec: IEEE Std 1003.1-2018 §8.3: + * + * https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap08.html + */ +static int +parse_tz_str(PyObject *tz_str_obj, _tzrule *out) +{ + PyObject *std_abbr = NULL; + PyObject *dst_abbr = NULL; + TransitionRuleType *start = NULL; + TransitionRuleType *end = NULL; + long std_offset, dst_offset; + + char *tz_str = PyBytes_AsString(tz_str_obj); + if (tz_str == NULL) { + return -1; + } + char *p = tz_str; + + // Read the `std` abbreviation, which must be at least 3 characters long. + ssize_t num_chars = parse_abbr(p, &std_abbr); + if (num_chars < 1) { + PyErr_Format(PyExc_ValueError, "Invalid STD format in %R", tz_str_obj); + goto error; + } + + p += num_chars; + + // Now read the STD offset, which is required + num_chars = parse_tz_delta(p, &std_offset); + if (num_chars < 0) { + PyErr_Format(PyExc_ValueError, "Invalid STD offset in %R", tz_str_obj); + goto error; + } + p += num_chars; + + // If the string ends here, there is no DST, otherwise we must parse the + // DST abbreviation and start and end dates and times. + if (*p == '\0') { + goto complete; + } + + num_chars = parse_abbr(p, &dst_abbr); + if (num_chars < 1) { + PyErr_Format(PyExc_ValueError, "Invalid DST format in %R", tz_str_obj); + goto error; + } + p += num_chars; + + if (*p == ',') { + // From the POSIX standard: + // + // If no offset follows dst, the alternative time is assumed to be one + // hour ahead of standard time. 
+ dst_offset = std_offset + 3600; + } + else { + num_chars = parse_tz_delta(p, &dst_offset); + if (num_chars < 0) { + PyErr_Format(PyExc_ValueError, "Invalid DST offset in %R", + tz_str_obj); + goto error; + } + + p += num_chars; + } + + TransitionRuleType **transitions[2] = {&start, &end}; + for (size_t i = 0; i < 2; ++i) { + if (*p != ',') { + PyErr_Format(PyExc_ValueError, + "Missing transition rules in TZ string: %R", + tz_str_obj); + goto error; + } + p++; + + num_chars = parse_transition_rule(p, transitions[i]); + if (num_chars < 0) { + PyErr_Format(PyExc_ValueError, + "Malformed transition rule in TZ string: %R", + tz_str_obj); + goto error; + } + p += num_chars; + } + + if (*p != '\0') { + PyErr_Format(PyExc_ValueError, + "Extraneous characters at end of TZ string: %R", + tz_str_obj); + goto error; + } + +complete: + build_tzrule(std_abbr, dst_abbr, std_offset, dst_offset, start, end, out); + Py_DECREF(std_abbr); + Py_XDECREF(dst_abbr); + + return 0; +error: + Py_XDECREF(std_abbr); + if (dst_abbr != NULL && dst_abbr != Py_None) { + Py_DECREF(dst_abbr); + } + + if (start != NULL) { + PyMem_Free(start); + } + + if (end != NULL) { + PyMem_Free(end); + } + + return -1; +} + +static ssize_t +parse_uint(const char *const p) +{ + if (!isdigit(*p)) { + return -1; + } + + return (*p) - '0'; +} + +/* Parse the STD and DST abbreviations from a TZ string. */ +static ssize_t +parse_abbr(const char *const p, PyObject **abbr) +{ + const char *ptr = p; + char buff = *ptr; + const char *str_start; + const char *str_end; + + if (*ptr == '<') { + ptr++; + str_start = ptr; + while ((buff = *ptr) != '>') { + // From the POSIX standard: + // + // In the quoted form, the first character shall be the less-than + // ( '<' ) character and the last character shall be the + // greater-than ( '>' ) character. 
All characters between these + // quoting characters shall be alphanumeric characters from the + // portable character set in the current locale, the plus-sign ( + // '+' ) character, or the minus-sign ( '-' ) character. The std + // and dst fields in this case shall not include the quoting + // characters. + if (!isalpha(buff) && !isdigit(buff) && buff != '+' && + buff != '-') { + return -1; + } + ptr++; + } + str_end = ptr; + ptr++; + } + else { + str_start = p; + // From the POSIX standard: + // + // In the unquoted form, all characters in these fields shall be + // alphabetic characters from the portable character set in the + // current locale. + while (isalpha(*ptr)) { + ptr++; + } + str_end = ptr; + } + + *abbr = PyUnicode_FromStringAndSize(str_start, str_end - str_start); + if (abbr == NULL) { + return -1; + } + + return ptr - p; +} + +/* Parse a UTC offset from a TZ str. */ +static ssize_t +parse_tz_delta(const char *const p, long *total_seconds) +{ + // From the POSIX spec: + // + // Indicates the value added to the local time to arrive at Coordinated + // Universal Time. The offset has the form: + // + // hh[:mm[:ss]] + // + // One or more digits may be used; the value is always interpreted as a + // decimal number. + // + // The POSIX spec says that the values for `hour` must be between 0 and 24 + // hours, but RFC 8536 ?3.3.1 specifies that the hours part of the + // transition times may be signed and range from -167 to 167. + long sign = -1; + long hours = 0; + long minutes = 0; + long seconds = 0; + + const char *ptr = p; + char buff = *ptr; + if (buff == '-' || buff == '+') { + // Negative numbers correspond to *positive* offsets, from the spec: + // + // If preceded by a '-', the timezone shall be east of the Prime + // Meridian; otherwise, it shall be west (which may be indicated by + // an optional preceding '+' ). 
+ if (buff == '-') { + sign = 1; + } + + ptr++; + } + + // The hour can be 1 or 2 numeric characters + for (size_t i = 0; i < 2; ++i) { + buff = *ptr; + if (!isdigit(buff)) { + if (i == 0) { + return -1; + } + else { + break; + } + } + + hours *= 10; + hours += buff - '0'; + ptr++; + } + + if (hours > 24 || hours < 0) { + return -1; + } + + // Minutes and seconds always of the format ":dd" + long *outputs[2] = {&minutes, &seconds}; + for (size_t i = 0; i < 2; ++i) { + if (*ptr != ':') { + goto complete; + } + ptr++; + + for (size_t j = 0; j < 2; ++j) { + buff = *ptr; + if (!isdigit(buff)) { + return -1; + } + *(outputs[i]) *= 10; + *(outputs[i]) += buff - '0'; + ptr++; + } + } + +complete: + *total_seconds = sign * ((hours * 3600) + (minutes * 60) + seconds); + + return ptr - p; +} + +/* Parse the date portion of a transition rule. */ +static ssize_t +parse_transition_rule(const char *const p, TransitionRuleType **out) +{ + // The full transition rule indicates when to change back and forth between + // STD and DST, and has the form: + // + // date[/time],date[/time] + // + // This function parses an individual date[/time] section, and returns + // the number of characters that contributed to the transition rule. This + // does not include the ',' at the end of the first rule. + // + // The POSIX spec states that if *time* is not given, the default is 02:00. + const char *ptr = p; + int8_t hour = 2; + int8_t minute = 0; + int8_t second = 0; + + // Rules come in one of three flavors: + // + // 1. Jn: Julian day n, with no leap days. + // 2. n: Day of year (0-based, with leap days) + // 3. Mm.n.d: Specifying by month, week and day-of-week. 
+ + if (*ptr == 'M') { + uint8_t month, week, day; + ptr++; + ssize_t tmp = parse_uint(ptr); + if (tmp < 0) { + return -1; + } + month = (uint8_t)tmp; + ptr++; + if (*ptr != '.') { + tmp = parse_uint(ptr); + if (tmp < 0) { + return -1; + } + + month *= 10; + month += (uint8_t)tmp; + ptr++; + } + + uint8_t *values[2] = {&week, &day}; + for (size_t i = 0; i < 2; ++i) { + if (*ptr != '.') { + return -1; + } + ptr++; + + tmp = parse_uint(ptr); + if (tmp < 0) { + return -1; + } + ptr++; + + *(values[i]) = tmp; + } + + if (*ptr == '/') { + ptr++; + ssize_t num_chars = + parse_transition_time(ptr, &hour, &minute, &second); + if (num_chars < 0) { + return -1; + } + ptr += num_chars; + } + + CalendarRule *rv = PyMem_Calloc(1, sizeof(CalendarRule)); + if (rv == NULL) { + return -1; + } + + if (calendarrule_new(month, week, day, hour, minute, second, rv)) { + PyMem_Free(rv); + return -1; + } + + *out = (TransitionRuleType *)rv; + } + else { + uint8_t julian = 0; + unsigned int day = 0; + if (*ptr == 'J') { + julian = 1; + ptr++; + } + + for (size_t i = 0; i < 3; ++i) { + if (!isdigit(*ptr)) { + if (i == 0) { + return -1; + } + break; + } + day *= 10; + day += (*ptr) - '0'; + ptr++; + } + + if (*ptr == '/') { + ptr++; + ssize_t num_chars = + parse_transition_time(ptr, &hour, &minute, &second); + if (num_chars < 0) { + return -1; + } + ptr += num_chars; + } + + DayRule *rv = PyMem_Calloc(1, sizeof(DayRule)); + if (rv == NULL) { + return -1; + } + + if (dayrule_new(julian, day, hour, minute, second, rv)) { + PyMem_Free(rv); + return -1; + } + *out = (TransitionRuleType *)rv; + } + + return ptr - p; +} + +/* Parse the time portion of a transition rule (e.g. following an /) */ +static ssize_t +parse_transition_time(const char *const p, int8_t *hour, int8_t *minute, + int8_t *second) +{ + // From the spec: + // + // The time has the same format as offset except that no leading sign + // ( '-' or '+' ) is allowed. 
+ // + // The format for the offset is: + // + // h[h][:mm[:ss]] + // + // RFC 8536 also allows transition times to be signed and to range from + // -167 to +167, but the current version only supports [0, 99]. + // + // TODO: Support the full range of transition hours. + int8_t *components[3] = {hour, minute, second}; + const char *ptr = p; + int8_t sign = 1; + + if (*ptr == '-' || *ptr == '+') { + if (*ptr == '-') { + sign = -1; + } + ptr++; + } + + for (size_t i = 0; i < 3; ++i) { + if (i > 0) { + if (*ptr != ':') { + break; + } + ptr++; + } + + uint8_t buff = 0; + for (size_t j = 0; j < 2; j++) { + if (!isdigit(*ptr)) { + if (i == 0 && j > 0) { + break; + } + return -1; + } + + buff *= 10; + buff += (*ptr) - '0'; + ptr++; + } + + *(components[i]) = sign * buff; + } + + return ptr - p; +} + +/* Constructor for a _tzrule. + * + * If `dst_abbr` is NULL, this will construct an "STD-only" _tzrule, in which + * case `dst_offset` will be ignored and `start` and `end` are expected to be + * NULL as well. + * + * Returns 0 on success. + */ +static int +build_tzrule(PyObject *std_abbr, PyObject *dst_abbr, long std_offset, + long dst_offset, TransitionRuleType *start, + TransitionRuleType *end, _tzrule *out) +{ + _tzrule rv = {0}; + + rv.start = start; + rv.end = end; + + if (build_ttinfo(std_offset, 0, std_abbr, &rv.std)) { + goto error; + } + + if (dst_abbr != NULL) { + rv.dst_diff = dst_offset - std_offset; + if (build_ttinfo(dst_offset, rv.dst_diff, dst_abbr, &rv.dst)) { + goto error; + } + } + else { + rv.std_only = 1; + } + + *out = rv; + + return 0; +error: + xdecref_ttinfo(&rv.std); + xdecref_ttinfo(&rv.dst); + return -1; +} + +/* Destructor for _tzrule. 
*/ +static void +free_tzrule(_tzrule *tzrule) +{ + xdecref_ttinfo(&(tzrule->std)); + if (!tzrule->std_only) { + xdecref_ttinfo(&(tzrule->dst)); + } + + if (tzrule->start != NULL) { + PyMem_Free(tzrule->start); + } + + if (tzrule->end != NULL) { + PyMem_Free(tzrule->end); + } +} + +/* Calculate DST offsets from transitions and UTC offsets + * + * This is necessary because each C `ttinfo` only contains the UTC offset, + * time zone abbreviation and an isdst boolean - it does not include the + * amount of the DST offset, but we need the amount for the dst() function. + * + * This function uses heuristics to infer what the offset should be, so it + * is not guaranteed that this will work for all zones. If we cannot assign + * a value for a given DST offset, we'll assume it's 1H rather than 0H, so + * bool(dt.dst()) will always match ttinfo.isdst. + */ +static void +utcoff_to_dstoff(size_t *trans_idx, long *utcoffs, long *dstoffs, + unsigned char *isdsts, size_t num_transitions, + size_t num_ttinfos) +{ + size_t dst_count = 0; + size_t dst_found = 0; + for (size_t i = 0; i < num_ttinfos; ++i) { + dst_count++; + } + + for (size_t i = 1; i < num_transitions; ++i) { + if (dst_count == dst_found) { + break; + } + + size_t idx = trans_idx[i]; + size_t comp_idx = trans_idx[i - 1]; + + // Only look at DST offsets that have not been assigned already + if (!isdsts[idx] || dstoffs[idx] != 0) { + continue; + } + + long dstoff = 0; + long utcoff = utcoffs[idx]; + + if (!isdsts[comp_idx]) { + dstoff = utcoff - utcoffs[comp_idx]; + } + + if (!dstoff && idx < (num_ttinfos - 1)) { + comp_idx = trans_idx[i + 1]; + + // If the following transition is also DST and we couldn't find + // the DST offset by this point, we're going to have to skip it + // and hope this transition gets assigned later + if (isdsts[comp_idx]) { + continue; + } + + dstoff = utcoff - utcoffs[comp_idx]; + } + + if (dstoff) { + dst_found++; + dstoffs[idx] = dstoff; + } + } + + if (dst_found < dst_count) { + // If 
there are time zones we didn't find a value for, we'll end up + // with dstoff = 0 for something where isdst=1. This is obviously + // wrong ? one hour will be a much better guess than 0. + for (size_t idx = 0; idx < num_ttinfos; ++idx) { + if (isdsts[idx] && !dstoffs[idx]) { + dstoffs[idx] = 3600; + } + } + } +} + +#define _swap(x, y, buffer) \ + buffer = x; \ + x = y; \ + y = buffer; + +/* Calculate transitions in local time from UTC time and offsets. + * + * We want to know when each transition occurs, denominated in the number of + * nominal wall-time seconds between 1970-01-01T00:00:00 and the transition in + * *local time* (note: this is *not* equivalent to the output of + * datetime.timestamp, which is the total number of seconds actual elapsed + * since 1970-01-01T00:00:00Z in UTC). + * + * This is an ambiguous question because "local time" can be ambiguous ? but it + * is disambiguated by the `fold` parameter, so we allocate two arrays: + * + * trans_local[0]: The wall-time transitions for fold=0 + * trans_local[1]: The wall-time transitions for fold=1 + * + * This returns 0 on success and a negative number of failure. The trans_local + * arrays must be freed if they are not NULL. 
+ */ +static int +ts_to_local(size_t *trans_idx, int64_t *trans_utc, long *utcoff, + int64_t *trans_local[2], size_t num_ttinfos, + size_t num_transitions) +{ + if (num_transitions == 0) { + return 0; + } + + // Copy the UTC transitions into each array to be modified in place later + for (size_t i = 0; i < 2; ++i) { + trans_local[i] = PyMem_Malloc(num_transitions * sizeof(int64_t)); + if (trans_local[i] == NULL) { + return -1; + } + + memcpy(trans_local[i], trans_utc, num_transitions * sizeof(int64_t)); + } + + int64_t offset_0, offset_1, buff; + if (num_ttinfos > 1) { + offset_0 = utcoff[0]; + offset_1 = utcoff[trans_idx[0]]; + + if (offset_1 > offset_0) { + _swap(offset_0, offset_1, buff); + } + } + else { + offset_0 = utcoff[0]; + offset_1 = utcoff[0]; + } + + trans_local[0][0] += offset_0; + trans_local[1][0] += offset_1; + + for (size_t i = 1; i < num_transitions; ++i) { + offset_0 = utcoff[trans_idx[i - 1]]; + offset_1 = utcoff[trans_idx[i]]; + + if (offset_1 > offset_0) { + _swap(offset_1, offset_0, buff); + } + + trans_local[0][i] += offset_0; + trans_local[1][i] += offset_1; + } + + return 0; +} + +/* Simple bisect_right binary search implementation */ +static size_t +_bisect(const int64_t value, const int64_t *arr, size_t size) +{ + size_t lo = 0; + size_t hi = size; + size_t m; + + while (lo < hi) { + m = (lo + hi) / 2; + if (arr[m] > value) { + hi = m; + } + else { + lo = m + 1; + } + } + + return hi; +} + +/* Find the ttinfo rules that apply at a given local datetime. */ +static _ttinfo * +find_ttinfo(PyZoneInfo_ZoneInfo *self, PyObject *dt) +{ + // datetime.time has a .tzinfo attribute that passes None as the dt + // argument; it only really has meaning for fixed-offset zones. 
+ if (dt == Py_None) { + if (self->fixed_offset) { + return &(self->tzrule_after.std); + } + else { + return &NO_TTINFO; + } + } + + int64_t ts; + if (get_local_timestamp(dt, &ts)) { + return NULL; + } + + unsigned char fold = PyDateTime_DATE_GET_FOLD(dt); + assert(fold < 2); + int64_t *local_transitions = self->trans_list_wall[fold]; + size_t num_trans = self->num_transitions; + + if (num_trans && ts < local_transitions[0]) { + return self->ttinfo_before; + } + else if (!num_trans || ts > local_transitions[self->num_transitions - 1]) { + return find_tzrule_ttinfo(&(self->tzrule_after), ts, fold, + PyDateTime_GET_YEAR(dt)); + } + else { + size_t idx = _bisect(ts, local_transitions, self->num_transitions) - 1; + assert(idx < self->num_transitions); + return self->trans_ttinfos[idx]; + } +} + +static int +is_leap_year(int year) +{ + const unsigned int ayear = (unsigned int)year; + return ayear % 4 == 0 && (ayear % 100 != 0 || ayear % 400 == 0); +} + +/* Calculates ordinal datetime from year, month and day. */ +static int +ymd_to_ord(int y, int m, int d) +{ + y -= 1; + int days_before_year = (y * 365) + (y / 4) - (y / 100) + (y / 400); + int yearday = DAYS_BEFORE_MONTH[m]; + if (m > 2 && is_leap_year(y + 1)) { + yearday += 1; + } + + return days_before_year + yearday + d; +} + +/* Calculate the number of seconds since 1970-01-01 in local time. + * + * This gets a datetime in the same "units" as self->trans_list_wall so that we + * can easily determine which transitions a datetime falls between. See the + * comment above ts_to_local for more information. 
+ * */ +static int +get_local_timestamp(PyObject *dt, int64_t *local_ts) +{ + assert(local_ts != NULL); + + int hour, minute, second; + int ord; + if (PyDateTime_CheckExact(dt)) { + int y = PyDateTime_GET_YEAR(dt); + int m = PyDateTime_GET_MONTH(dt); + int d = PyDateTime_GET_DAY(dt); + hour = PyDateTime_DATE_GET_HOUR(dt); + minute = PyDateTime_DATE_GET_MINUTE(dt); + second = PyDateTime_DATE_GET_SECOND(dt); + + ord = ymd_to_ord(y, m, d); + } + else { + PyObject *num = PyObject_CallMethod(dt, "toordinal", NULL); + if (num == NULL) { + return -1; + } + + ord = PyLong_AsLong(num); + Py_DECREF(num); + if (ord == -1 && PyErr_Occurred()) { + return -1; + } + + num = PyObject_GetAttrString(dt, "hour"); + if (num == NULL) { + return -1; + } + hour = PyLong_AsLong(num); + Py_DECREF(num); + if (hour == -1) { + return -1; + } + + num = PyObject_GetAttrString(dt, "minute"); + if (num == NULL) { + return -1; + } + minute = PyLong_AsLong(num); + Py_DECREF(num); + if (minute == -1) { + return -1; + } + + num = PyObject_GetAttrString(dt, "second"); + if (num == NULL) { + return -1; + } + second = PyLong_AsLong(num); + Py_DECREF(num); + if (second == -1) { + return -1; + } + } + + *local_ts = (int64_t)(ord - EPOCHORDINAL) * 86400 + + (int64_t)(hour * 3600 + minute * 60 + second); + + return 0; +} + +///// +// Functions for cache handling + +/* Constructor for StrongCacheNode */ +static StrongCacheNode * +strong_cache_node_new(PyObject *key, PyObject *zone) +{ + StrongCacheNode *node = PyMem_Malloc(sizeof(StrongCacheNode)); + if (node == NULL) { + return NULL; + } + + Py_INCREF(key); + Py_INCREF(zone); + + node->next = NULL; + node->prev = NULL; + node->key = key; + node->zone = zone; + + return node; +} + +/* Destructor for StrongCacheNode */ +void +strong_cache_node_free(StrongCacheNode *node) +{ + Py_XDECREF(node->key); + Py_XDECREF(node->zone); + + PyMem_Free(node); +} + +/* Frees all nodes at or after a specified root in the strong cache. 
+ * + * This can be used on the root node to free the entire cache or it can be used + * to clear all nodes that have been expired (which, if everything is going + * right, will actually only be 1 node at a time). + */ +void +strong_cache_free(StrongCacheNode *root) +{ + StrongCacheNode *node = root; + StrongCacheNode *next_node; + while (node != NULL) { + next_node = node->next; + strong_cache_node_free(node); + + node = next_node; + } +} + +/* Removes a node from the cache and update its neighbors. + * + * This is used both when ejecting a node from the cache and when moving it to + * the front of the cache. + */ +static void +remove_from_strong_cache(StrongCacheNode *node) +{ + if (ZONEINFO_STRONG_CACHE == node) { + ZONEINFO_STRONG_CACHE = node->next; + } + + if (node->prev != NULL) { + node->prev->next = node->next; + } + + if (node->next != NULL) { + node->next->prev = node->prev; + } + + node->next = NULL; + node->prev = NULL; +} + +/* Retrieves the node associated with a key, if it exists. + * + * This traverses the strong cache until it finds a matching key and returns a + * pointer to the relevant node if found. Returns NULL if no node is found. + * + * root may be NULL, indicating an empty cache. + */ +static StrongCacheNode * +find_in_strong_cache(const StrongCacheNode *const root, PyObject *const key) +{ + const StrongCacheNode *node = root; + while (node != NULL) { + if (PyObject_RichCompareBool(key, node->key, Py_EQ)) { + return (StrongCacheNode *)node; + } + + node = node->next; + } + + return NULL; +} + +/* Ejects a given key from the class's strong cache, if applicable. + * + * This function is used to enable the per-key functionality in clear_cache. 
+ */ +static void +eject_from_strong_cache(const PyTypeObject *const type, PyObject *key) +{ + if (type != &PyZoneInfo_ZoneInfoType) { + return; + } + + StrongCacheNode *node = find_in_strong_cache(ZONEINFO_STRONG_CACHE, key); + if (node != NULL) { + remove_from_strong_cache(node); + + strong_cache_node_free(node); + } +} + +/* Moves a node to the front of the LRU cache. + * + * The strong cache is an LRU cache, so whenever a given node is accessed, if + * it is not at the front of the cache, it needs to be moved there. + */ +static void +move_strong_cache_node_to_front(StrongCacheNode **root, StrongCacheNode *node) +{ + StrongCacheNode *root_p = *root; + if (root_p == node) { + return; + } + + remove_from_strong_cache(node); + + node->prev = NULL; + node->next = root_p; + + if (root_p != NULL) { + root_p->prev = node; + } + + *root = node; +} + +/* Retrieves a ZoneInfo from the strong cache if it's present. + * + * This function finds the ZoneInfo by key and if found will move the node to + * the front of the LRU cache and return a new reference to it. It returns NULL + * if the key is not in the cache. + * + * The strong cache is currently only implemented for the base class, so this + * always returns a cache miss for subclasses. + */ +static PyObject * +zone_from_strong_cache(const PyTypeObject *const type, PyObject *const key) +{ + if (type != &PyZoneInfo_ZoneInfoType) { + return NULL; // Strong cache currently only implemented for base class + } + + StrongCacheNode *node = find_in_strong_cache(ZONEINFO_STRONG_CACHE, key); + + if (node != NULL) { + move_strong_cache_node_to_front(&ZONEINFO_STRONG_CACHE, node); + Py_INCREF(node->zone); + return node->zone; + } + + return NULL; // Cache miss +} + +/* Inserts a new key into the strong LRU cache. + * + * This function is only to be used after a cache miss ? it creates a new node + * at the front of the cache and ejects any stale entries (keeping the size of + * the cache to at most ZONEINFO_STRONG_CACHE_MAX_SIZE). 
+ */ +static void +update_strong_cache(const PyTypeObject *const type, PyObject *key, + PyObject *zone) +{ + if (type != &PyZoneInfo_ZoneInfoType) { + return; + } + + StrongCacheNode *new_node = strong_cache_node_new(key, zone); + + move_strong_cache_node_to_front(&ZONEINFO_STRONG_CACHE, new_node); + + StrongCacheNode *node = new_node->next; + for (size_t i = 1; i < ZONEINFO_STRONG_CACHE_MAX_SIZE; ++i) { + if (node == NULL) { + return; + } + node = node->next; + } + + // Everything beyond this point needs to be freed + if (node != NULL) { + if (node->prev != NULL) { + node->prev->next = NULL; + } + strong_cache_free(node); + } +} + +/* Clears all entries into a type's strong cache. + * + * Because the strong cache is not implemented for subclasses, this is a no-op + * for everything except the base class. + */ +void +clear_strong_cache(const PyTypeObject *const type) +{ + if (type != &PyZoneInfo_ZoneInfoType) { + return; + } + + strong_cache_free(ZONEINFO_STRONG_CACHE); +} + +static PyObject * +new_weak_cache() +{ + PyObject *weakref_module = PyImport_ImportModule("weakref"); + if (weakref_module == NULL) { + return NULL; + } + + PyObject *weak_cache = + PyObject_CallMethod(weakref_module, "WeakValueDictionary", ""); + Py_DECREF(weakref_module); + return weak_cache; +} + +static int +initialize_caches() +{ + if (TIMEDELTA_CACHE == NULL) { + TIMEDELTA_CACHE = PyDict_New(); + } + else { + Py_INCREF(TIMEDELTA_CACHE); + } + + if (TIMEDELTA_CACHE == NULL) { + return -1; + } + + if (ZONEINFO_WEAK_CACHE == NULL) { + ZONEINFO_WEAK_CACHE = new_weak_cache(); + } + else { + Py_INCREF(ZONEINFO_WEAK_CACHE); + } + + if (ZONEINFO_WEAK_CACHE == NULL) { + return -1; + } + + return 0; +} + +static PyObject * +zoneinfo_init_subclass(PyTypeObject *cls, PyObject *args, PyObject **kwargs) +{ + PyObject *weak_cache = new_weak_cache(); + if (weak_cache == NULL) { + return NULL; + } + + PyObject_SetAttrString((PyObject *)cls, "_weak_cache", weak_cache); + Py_RETURN_NONE; +} + +///// +// 
Specify the ZoneInfo type +static PyMethodDef zoneinfo_methods[] = { + {"clear_cache", (PyCFunction)(void (*)(void))zoneinfo_clear_cache, + METH_VARARGS | METH_KEYWORDS | METH_CLASS, + PyDoc_STR("Clear the ZoneInfo cache.")}, + {"no_cache", (PyCFunction)(void (*)(void))zoneinfo_no_cache, + METH_VARARGS | METH_KEYWORDS | METH_CLASS, + PyDoc_STR("Get a new instance of ZoneInfo, bypassing the cache.")}, + {"from_file", (PyCFunction)(void (*)(void))zoneinfo_from_file, + METH_VARARGS | METH_KEYWORDS | METH_CLASS, + PyDoc_STR("Create a ZoneInfo file from a file object.")}, + {"utcoffset", (PyCFunction)zoneinfo_utcoffset, METH_O, + PyDoc_STR("Retrieve a timedelta representing the UTC offset in a zone at " + "the given datetime.")}, + {"dst", (PyCFunction)zoneinfo_dst, METH_O, + PyDoc_STR("Retrieve a timedelta representing the amount of DST applied " + "in a zone at the given datetime.")}, + {"tzname", (PyCFunction)zoneinfo_tzname, METH_O, + PyDoc_STR("Retrieve a string containing the abbreviation for the time " + "zone that applies in a zone at a given datetime.")}, + {"fromutc", (PyCFunction)zoneinfo_fromutc, METH_O, + PyDoc_STR("Given a datetime with local time in UTC, retrieve an adjusted " + "datetime in local time.")}, + {"__reduce__", (PyCFunction)zoneinfo_reduce, METH_NOARGS, + PyDoc_STR("Function for serialization with the pickle protocol.")}, + {"_unpickle", (PyCFunction)zoneinfo__unpickle, METH_VARARGS | METH_CLASS, + PyDoc_STR("Private method used in unpickling.")}, + {"__init_subclass__", (PyCFunction)(void (*)(void))zoneinfo_init_subclass, + METH_VARARGS | METH_KEYWORDS, + PyDoc_STR("Function to initialize subclasses.")}, + {NULL} /* Sentinel */ +}; + +static PyMemberDef zoneinfo_members[] = { + {.name = "key", + .offset = offsetof(PyZoneInfo_ZoneInfo, key), + .type = T_OBJECT_EX, + .flags = READONLY, + .doc = NULL}, + {NULL}, /* Sentinel */ +}; + +static PyTypeObject PyZoneInfo_ZoneInfoType = { + PyVarObject_HEAD_INIT(NULL, 0) // + .tp_name = 
"zoneinfo.ZoneInfo", + .tp_basicsize = sizeof(PyZoneInfo_ZoneInfo), + .tp_weaklistoffset = offsetof(PyZoneInfo_ZoneInfo, weakreflist), + .tp_repr = (reprfunc)zoneinfo_repr, + .tp_str = (reprfunc)zoneinfo_str, + .tp_getattro = PyObject_GenericGetAttr, + .tp_flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE), + /* .tp_doc = zoneinfo_doc, */ + .tp_methods = zoneinfo_methods, + .tp_members = zoneinfo_members, + .tp_new = zoneinfo_new, + .tp_dealloc = zoneinfo_dealloc, +}; + +///// +// Specify the _zoneinfo module +static PyMethodDef module_methods[] = {{NULL, NULL}}; +static void +module_free() +{ + Py_XDECREF(_tzpath_find_tzfile); + _tzpath_find_tzfile = NULL; + + Py_XDECREF(_common_mod); + _common_mod = NULL; + + Py_XDECREF(io_open); + io_open = NULL; + + xdecref_ttinfo(&NO_TTINFO); + + Py_XDECREF(TIMEDELTA_CACHE); + if (!Py_REFCNT(TIMEDELTA_CACHE)) { + TIMEDELTA_CACHE = NULL; + } + + Py_XDECREF(ZONEINFO_WEAK_CACHE); + if (!Py_REFCNT(ZONEINFO_WEAK_CACHE)) { + ZONEINFO_WEAK_CACHE = NULL; + } + + strong_cache_free(ZONEINFO_STRONG_CACHE); + ZONEINFO_STRONG_CACHE = NULL; +} + +static int +zoneinfomodule_exec(PyObject *m) +{ + PyDateTime_IMPORT; + PyZoneInfo_ZoneInfoType.tp_base = PyDateTimeAPI->TZInfoType; + if (PyType_Ready(&PyZoneInfo_ZoneInfoType) < 0) { + goto error; + } + + Py_INCREF(&PyZoneInfo_ZoneInfoType); + PyModule_AddObject(m, "ZoneInfo", (PyObject *)&PyZoneInfo_ZoneInfoType); + + /* Populate imports */ + PyObject *_tzpath_module = PyImport_ImportModule("zoneinfo._tzpath"); + if (_tzpath_module == NULL) { + goto error; + } + + _tzpath_find_tzfile = + PyObject_GetAttrString(_tzpath_module, "find_tzfile"); + Py_DECREF(_tzpath_module); + if (_tzpath_find_tzfile == NULL) { + goto error; + } + + PyObject *io_module = PyImport_ImportModule("io"); + if (io_module == NULL) { + goto error; + } + + io_open = PyObject_GetAttrString(io_module, "open"); + Py_DECREF(io_module); + if (io_open == NULL) { + goto error; + } + + _common_mod = 
PyImport_ImportModule("zoneinfo._common"); + if (_common_mod == NULL) { + goto error; + } + + if (NO_TTINFO.utcoff == NULL) { + NO_TTINFO.utcoff = Py_None; + NO_TTINFO.dstoff = Py_None; + NO_TTINFO.tzname = Py_None; + + for (size_t i = 0; i < 3; ++i) { + Py_INCREF(Py_None); + } + } + + if (initialize_caches()) { + goto error; + } + + return 0; + +error: + return -1; +} + +static PyModuleDef_Slot zoneinfomodule_slots[] = { + {Py_mod_exec, zoneinfomodule_exec}, {0, NULL}}; + +static struct PyModuleDef zoneinfomodule = { + PyModuleDef_HEAD_INIT, + .m_name = "_zoneinfo", + .m_doc = "C implementation of the zoneinfo module", + .m_size = 0, + .m_methods = module_methods, + .m_slots = zoneinfomodule_slots, + .m_free = (freefunc)module_free}; + +PyMODINIT_FUNC +PyInit__zoneinfo(void) +{ + return PyModuleDef_Init(&zoneinfomodule); +} diff --git a/PCbuild/_zoneinfo.vcxproj b/PCbuild/_zoneinfo.vcxproj new file mode 100644 index 0000000000000..6e6389c377339 --- /dev/null +++ b/PCbuild/_zoneinfo.vcxproj @@ -0,0 +1,109 @@ +? + + + + Debug + ARM + + + Debug + ARM64 + + + Debug + Win32 + + + Debug + x64 + + + PGInstrument + ARM + + + PGInstrument + ARM64 + + + PGInstrument + Win32 + + + PGInstrument + x64 + + + PGUpdate + ARM + + + PGUpdate + ARM64 + + + PGUpdate + Win32 + + + PGUpdate + x64 + + + Release + ARM + + + Release + ARM64 + + + Release + Win32 + + + Release + x64 + + + + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742} + _zoneinfo + Win32Proj + + + + + DynamicLibrary + NotSet + + + + .pyd + + + + + + + + + + <_ProjectFileVersion>10.0.30319.1 + + + + + + + + + + {cf7ac3d1-e2df-41d2-bea6-1e2556cdea26} + false + + + + + + diff --git a/PCbuild/_zoneinfo.vcxproj.filters b/PCbuild/_zoneinfo.vcxproj.filters new file mode 100644 index 0000000000000..e3c3ef4c72a7f --- /dev/null +++ b/PCbuild/_zoneinfo.vcxproj.filters @@ -0,0 +1,16 @@ +? 
+ + + + + + + {2422278e-eeeb-4241-8182-433e2bc5a7fc} + + + + + Source Files + + + diff --git a/PCbuild/lib.pyproj b/PCbuild/lib.pyproj index ee01d109f162d..7ce88e5690b45 100644 --- a/PCbuild/lib.pyproj +++ b/PCbuild/lib.pyproj @@ -1396,6 +1396,10 @@ + + + + @@ -1563,6 +1567,10 @@ + + + + diff --git a/PCbuild/pcbuild.proj b/PCbuild/pcbuild.proj index 9c4d352b43448..4d416c589e4c4 100644 --- a/PCbuild/pcbuild.proj +++ b/PCbuild/pcbuild.proj @@ -51,7 +51,7 @@ - + diff --git a/PCbuild/pcbuild.sln b/PCbuild/pcbuild.sln index 6d4c9506e5ec1..61db4e02ad383 100644 --- a/PCbuild/pcbuild.sln +++ b/PCbuild/pcbuild.sln @@ -91,6 +91,8 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_testconsole", "_testconsol EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_asyncio", "_asyncio.vcxproj", "{384C224A-7474-476E-A01B-750EA7DE918C}" EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_zoneinfo", "_zoneinfo.vcxproj", "{FCBE1EF2-E0F0-40B1-88B5-00A35D378742}" +EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_queue", "_queue.vcxproj", "{78D80A15-BD8C-44E2-B49E-1F05B0A0A687}" EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "liblzma", "liblzma.vcxproj", "{12728250-16EC-4DC6-94D7-E21DD88947F8}" diff --git a/PCbuild/readme.txt b/PCbuild/readme.txt index 5fe3e8c36ecf5..c44910e9bfcf0 100644 --- a/PCbuild/readme.txt +++ b/PCbuild/readme.txt @@ -132,6 +132,7 @@ library which are implemented in C; each one builds a DLL (renamed to _asyncio _ctypes _ctypes_test +_zoneinfo _decimal _elementtree _hashlib diff --git a/Tools/msi/lib/lib_files.wxs b/Tools/msi/lib/lib_files.wxs index 95541599b9bb2..037fc38f1d9cb 100644 --- a/Tools/msi/lib/lib_files.wxs +++ b/Tools/msi/lib/lib_files.wxs @@ -1,6 +1,6 @@ ? 
- + diff --git a/configure b/configure index 64bcde6bfdfb9..56d66d0235289 100755 --- a/configure +++ b/configure @@ -658,6 +658,7 @@ LIBFFI_INCLUDEDIR PKG_CONFIG_LIBDIR PKG_CONFIG_PATH PKG_CONFIG +TZPATH SHLIBS CFLAGSFORSHARED LINKFORSHARED @@ -819,6 +820,7 @@ with_assertions enable_optimizations with_lto with_hash_algorithm +with_tzpath with_address_sanitizer with_memory_sanitizer with_undefined_behavior_sanitizer @@ -1525,6 +1527,9 @@ Optional Packages: --with-hash-algorithm=[fnv|siphash24] select hash algorithm for use in Python/pyhash.c (default is SipHash24) + --with-tzpath= + Select the default time zone search path for zoneinfo.TZPATH + --with-address-sanitizer enable AddressSanitizer memory error detector, 'asan' (default is no) @@ -10154,6 +10159,47 @@ $as_echo "default" >&6; } fi +validate_tzpath() { + # Checks that each element of hte path is an absolute path + if test -z "$1"; then + # Empty string is allowed: it indicates no system TZPATH + return 0 + fi + + # Bad paths are those that don't start with / + if ( echo $1 | grep -qE '(^|:)([^/]|$)' ); then + as_fn_error $? "--with-tzpath must contain only absolute paths, not $1" "$LINENO" 5 + return 1; + fi +} + +TZPATH="/usr/share/zoneinfo:/usr/lib/zoneinfo:/usr/share/lib/zoneinfo:/etc/zoneinfo" +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-tzpath" >&5 +$as_echo_n "checking for --with-tzpath... " >&6; } + +# Check whether --with-tzpath was given. +if test "${with_tzpath+set}" = set; then : + withval=$with_tzpath; +case "$withval" in + yes) + as_fn_error $? 
"--with-tzpath requires a value" "$LINENO" 5 + ;; + *) + validate_tzpath "$withval" + TZPATH="$withval" + { $as_echo "$as_me:${as_lineno-$LINENO}: result: \"$withval\"" >&5 +$as_echo "\"$withval\"" >&6; } + ;; +esac + +else + validate_tzpath "$TZPATH" + { $as_echo "$as_me:${as_lineno-$LINENO}: result: \"$TZPATH\"" >&5 +$as_echo "\"$TZPATH\"" >&6; } +fi + + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-address-sanitizer" >&5 $as_echo_n "checking for --with-address-sanitizer... " >&6; } diff --git a/configure.ac b/configure.ac index 21c47b56358b1..497d7c191d537 100644 --- a/configure.ac +++ b/configure.ac @@ -2946,6 +2946,42 @@ esac ], [AC_MSG_RESULT(default)]) +validate_tzpath() { + # Checks that each element of hte path is an absolute path + if test -z "$1"; then + # Empty string is allowed: it indicates no system TZPATH + return 0 + fi + + # Bad paths are those that don't start with / + dnl quadrigraphs "@<:@" and "@:>@" produce "[" and "]" in the output + if ( echo $1 | grep -qE '(^|:)(@<:@^/@:>@|$)' ); then + AC_MSG_ERROR([--with-tzpath must contain only absolute paths, not $1]) + return 1; + fi +} + +TZPATH="/usr/share/zoneinfo:/usr/lib/zoneinfo:/usr/share/lib/zoneinfo:/etc/zoneinfo" +AC_MSG_CHECKING(for --with-tzpath) +AC_ARG_WITH(tzpath, + AS_HELP_STRING([--with-tzpath=] + [Select the default time zone search path for zoneinfo.TZPATH]), +[ +case "$withval" in + yes) + AC_MSG_ERROR([--with-tzpath requires a value]) + ;; + *) + validate_tzpath "$withval" + TZPATH="$withval" + AC_MSG_RESULT("$withval") + ;; +esac +], +[validate_tzpath "$TZPATH" + AC_MSG_RESULT("$TZPATH")]) +AC_SUBST(TZPATH) + AC_MSG_CHECKING(for --with-address-sanitizer) AC_ARG_WITH(address_sanitizer, AS_HELP_STRING([--with-address-sanitizer], diff --git a/setup.py b/setup.py index 794ba2f766237..68fc3120cc317 100644 --- a/setup.py +++ b/setup.py @@ -304,6 +304,17 @@ def find_library_file(compiler, libname, std_dirs, paths): else: assert False, "Internal error: Path not found in 
std_dirs or paths" +def validate_tzpath(): + base_tzpath = sysconfig.get_config_var('TZPATH') + if not base_tzpath: + return + + tzpaths = base_tzpath.split(os.pathsep) + bad_paths = [tzpath for tzpath in tzpaths if not os.path.isabs(tzpath)] + if bad_paths: + raise ValueError('TZPATH must contain only absolute paths, ' + + f'found:\n{tzpaths!r}\nwith invalid paths:\n' + + f'{bad_paths!r}') def find_module_file(module, dirlist): """Find a module in a set of possible folders. If it is not found @@ -816,6 +827,8 @@ def detect_simple_extensions(self): # uses modf(). self.add(Extension('_datetime', ['_datetimemodule.c'], libraries=['m'])) + # zoneinfo module + self.add(Extension('_zoneinfo', ['_zoneinfo.c'])), # random number generator implemented in C self.add(Extension("_random", ["_randommodule.c"], extra_compile_args=['-DPy_BUILD_CORE_MODULE'])) @@ -2495,6 +2508,7 @@ class DummyProcess: ProcessPoolExecutor = None sys.modules['concurrent.futures.process'] = DummyProcess + validate_tzpath() # turn off warnings when deprecated modules are imported import warnings From webhook-mailer at python.org Sat May 16 04:33:51 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sat, 16 May 2020 08:33:51 -0000 Subject: [Python-checkins] bpo-40457: Support OpenSSL without TLS 1.0/1.1 (GH-19862) Message-ID: https://github.com/python/cpython/commit/a669443dfb79fc6aca2544b885895814798db15b commit: a669443dfb79fc6aca2544b885895814798db15b branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-16T01:33:42-07:00 summary: bpo-40457: Support OpenSSL without TLS 1.0/1.1 (GH-19862) OpenSSL can be build without support for TLS 1.0 and 1.1. The ssl module now correctly adheres to OPENSSL_NO_TLS1 and OPENSSL_NO_TLS1_1 flags. Also update multissltest to test with latest OpenSSL and LibreSSL releases. 
Signed-off-by: Christian Heimes Automerge-Triggered-By: @tiran (cherry picked from commit 6e8cda91d92da72800d891b2fc2073ecbc134d98) Co-authored-by: Christian Heimes files: A Misc/NEWS.d/next/Library/2020-05-02-17-17-37.bpo-40457.EXReI1.rst M Modules/_ssl.c M Tools/ssl/multissltests.py diff --git a/Misc/NEWS.d/next/Library/2020-05-02-17-17-37.bpo-40457.EXReI1.rst b/Misc/NEWS.d/next/Library/2020-05-02-17-17-37.bpo-40457.EXReI1.rst new file mode 100644 index 0000000000000..19b6dd685cd8c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-02-17-17-37.bpo-40457.EXReI1.rst @@ -0,0 +1 @@ +The ssl module now support OpenSSL builds without TLS 1.0 and 1.1 methods. diff --git a/Modules/_ssl.c b/Modules/_ssl.c index 1da65eae7a8b4..b0e3c0432f51d 100644 --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -149,15 +149,6 @@ static void _PySSLFixErrno(void) { # define PY_OPENSSL_1_1_API 1 #endif -/* Openssl comes with TLSv1.1 and TLSv1.2 between 1.0.0h and 1.0.1 - http://www.openssl.org/news/changelog.html - */ -#if OPENSSL_VERSION_NUMBER >= 0x10001000L -# define HAVE_TLSv1_2 1 -#else -# define HAVE_TLSv1_2 0 -#endif - /* SNI support (client- and server-side) appeared in OpenSSL 1.0.0 and 0.9.8f * This includes the SSL_set_SSL_CTX() function. 
*/ @@ -328,13 +319,9 @@ enum py_ssl_version { PY_SSL_VERSION_SSL2, PY_SSL_VERSION_SSL3=1, PY_SSL_VERSION_TLS, /* SSLv23 */ -#if HAVE_TLSv1_2 PY_SSL_VERSION_TLS1, PY_SSL_VERSION_TLS1_1, PY_SSL_VERSION_TLS1_2, -#else - PY_SSL_VERSION_TLS1, -#endif PY_SSL_VERSION_TLS_CLIENT=0x10, PY_SSL_VERSION_TLS_SERVER, }; @@ -3088,35 +3075,45 @@ _ssl__SSLContext_impl(PyTypeObject *type, int proto_version) #endif PySSL_BEGIN_ALLOW_THREADS - if (proto_version == PY_SSL_VERSION_TLS1) + switch(proto_version) { +#if defined(SSL3_VERSION) && !defined(OPENSSL_NO_SSL3) + case PY_SSL_VERSION_SSL3: + ctx = SSL_CTX_new(SSLv3_method()); + break; +#endif +#if defined(TLS1_VERSION) && !defined(OPENSSL_NO_TLS1) + case PY_SSL_VERSION_TLS1: ctx = SSL_CTX_new(TLSv1_method()); -#if HAVE_TLSv1_2 - else if (proto_version == PY_SSL_VERSION_TLS1_1) - ctx = SSL_CTX_new(TLSv1_1_method()); - else if (proto_version == PY_SSL_VERSION_TLS1_2) - ctx = SSL_CTX_new(TLSv1_2_method()); + break; #endif -#ifndef OPENSSL_NO_SSL3 - else if (proto_version == PY_SSL_VERSION_SSL3) - ctx = SSL_CTX_new(SSLv3_method()); +#if defined(TLS1_1_VERSION) && !defined(OPENSSL_NO_TLS1_1) + case PY_SSL_VERSION_TLS1_1: + ctx = SSL_CTX_new(TLSv1_1_method()); + break; #endif -#ifndef OPENSSL_NO_SSL2 - else if (proto_version == PY_SSL_VERSION_SSL2) - ctx = SSL_CTX_new(SSLv2_method()); +#if defined(TLS1_2_VERSION) && !defined(OPENSSL_NO_TLS1_2) + case PY_SSL_VERSION_TLS1_2: + ctx = SSL_CTX_new(TLSv1_2_method()); + break; #endif - else if (proto_version == PY_SSL_VERSION_TLS) /* SSLv23 */ + case PY_SSL_VERSION_TLS: + /* SSLv23 */ ctx = SSL_CTX_new(TLS_method()); - else if (proto_version == PY_SSL_VERSION_TLS_CLIENT) + break; + case PY_SSL_VERSION_TLS_CLIENT: ctx = SSL_CTX_new(TLS_client_method()); - else if (proto_version == PY_SSL_VERSION_TLS_SERVER) + break; + case PY_SSL_VERSION_TLS_SERVER: ctx = SSL_CTX_new(TLS_server_method()); - else + break; + default: proto_version = -1; + } PySSL_END_ALLOW_THREADS if (proto_version == -1) { 
PyErr_SetString(PyExc_ValueError, - "invalid protocol version"); + "invalid or unsupported protocol version"); return NULL; } if (ctx == NULL) { @@ -6188,12 +6185,10 @@ PyInit__ssl(void) PY_SSL_VERSION_TLS_SERVER); PyModule_AddIntConstant(m, "PROTOCOL_TLSv1", PY_SSL_VERSION_TLS1); -#if HAVE_TLSv1_2 PyModule_AddIntConstant(m, "PROTOCOL_TLSv1_1", PY_SSL_VERSION_TLS1_1); PyModule_AddIntConstant(m, "PROTOCOL_TLSv1_2", PY_SSL_VERSION_TLS1_2); -#endif /* protocol options */ PyModule_AddIntConstant(m, "OP_ALL", @@ -6201,10 +6196,8 @@ PyInit__ssl(void) PyModule_AddIntConstant(m, "OP_NO_SSLv2", SSL_OP_NO_SSLv2); PyModule_AddIntConstant(m, "OP_NO_SSLv3", SSL_OP_NO_SSLv3); PyModule_AddIntConstant(m, "OP_NO_TLSv1", SSL_OP_NO_TLSv1); -#if HAVE_TLSv1_2 PyModule_AddIntConstant(m, "OP_NO_TLSv1_1", SSL_OP_NO_TLSv1_1); PyModule_AddIntConstant(m, "OP_NO_TLSv1_2", SSL_OP_NO_TLSv1_2); -#endif #ifdef SSL_OP_NO_TLSv1_3 PyModule_AddIntConstant(m, "OP_NO_TLSv1_3", SSL_OP_NO_TLSv1_3); #else diff --git a/Tools/ssl/multissltests.py b/Tools/ssl/multissltests.py index 0e37ec1bba93b..12af98d12c45d 100755 --- a/Tools/ssl/multissltests.py +++ b/Tools/ssl/multissltests.py @@ -43,20 +43,21 @@ log = logging.getLogger("multissl") OPENSSL_OLD_VERSIONS = [ + "1.0.2u", + "1.1.0l", ] OPENSSL_RECENT_VERSIONS = [ - "1.0.2u", - "1.1.0l", "1.1.1g", # "3.0.0-alpha2" ] LIBRESSL_OLD_VERSIONS = [ + "2.9.2", ] LIBRESSL_RECENT_VERSIONS = [ - "2.9.2", + "3.1.0", ] # store files in ../multissl @@ -80,7 +81,7 @@ parser.add_argument( '--disable-ancient', action='store_true', - help="Don't test OpenSSL < 1.0.2 and LibreSSL < 2.5.3.", + help="Don't test OpenSSL and LibreSSL versions without upstream support", ) parser.add_argument( '--openssl', From webhook-mailer at python.org Sat May 16 04:45:11 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sat, 16 May 2020 08:45:11 -0000 Subject: [Python-checkins] [3.7] bpo-40457: Support OpenSSL without TLS 1.0/1.1 (GH-19862) (GH-20126) Message-ID: 
https://github.com/python/cpython/commit/43b355e53fd0796990a8810cd3461c197e20a3b9 commit: 43b355e53fd0796990a8810cd3461c197e20a3b9 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-16T10:45:06+02:00 summary: [3.7] bpo-40457: Support OpenSSL without TLS 1.0/1.1 (GH-19862) (GH-20126) files: A Misc/NEWS.d/next/Library/2020-05-02-17-17-37.bpo-40457.EXReI1.rst M Modules/_ssl.c M Tools/ssl/multissltests.py diff --git a/Misc/NEWS.d/next/Library/2020-05-02-17-17-37.bpo-40457.EXReI1.rst b/Misc/NEWS.d/next/Library/2020-05-02-17-17-37.bpo-40457.EXReI1.rst new file mode 100644 index 0000000000000..19b6dd685cd8c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-02-17-17-37.bpo-40457.EXReI1.rst @@ -0,0 +1 @@ +The ssl module now support OpenSSL builds without TLS 1.0 and 1.1 methods. diff --git a/Modules/_ssl.c b/Modules/_ssl.c index 94606ef0e2993..bc412ac139476 100644 --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -149,15 +149,6 @@ static void _PySSLFixErrno(void) { # define PY_OPENSSL_1_1_API 1 #endif -/* Openssl comes with TLSv1.1 and TLSv1.2 between 1.0.0h and 1.0.1 - http://www.openssl.org/news/changelog.html - */ -#if OPENSSL_VERSION_NUMBER >= 0x10001000L -# define HAVE_TLSv1_2 1 -#else -# define HAVE_TLSv1_2 0 -#endif - /* SNI support (client- and server-side) appeared in OpenSSL 1.0.0 and 0.9.8f * This includes the SSL_set_SSL_CTX() function. 
*/ @@ -324,13 +315,9 @@ enum py_ssl_version { PY_SSL_VERSION_SSL2, PY_SSL_VERSION_SSL3=1, PY_SSL_VERSION_TLS, /* SSLv23 */ -#if HAVE_TLSv1_2 PY_SSL_VERSION_TLS1, PY_SSL_VERSION_TLS1_1, PY_SSL_VERSION_TLS1_2, -#else - PY_SSL_VERSION_TLS1, -#endif PY_SSL_VERSION_TLS_CLIENT=0x10, PY_SSL_VERSION_TLS_SERVER, }; @@ -3030,35 +3017,45 @@ _ssl__SSLContext_impl(PyTypeObject *type, int proto_version) #endif PySSL_BEGIN_ALLOW_THREADS - if (proto_version == PY_SSL_VERSION_TLS1) + switch(proto_version) { +#if defined(SSL3_VERSION) && !defined(OPENSSL_NO_SSL3) + case PY_SSL_VERSION_SSL3: + ctx = SSL_CTX_new(SSLv3_method()); + break; +#endif +#if defined(TLS1_VERSION) && !defined(OPENSSL_NO_TLS1) + case PY_SSL_VERSION_TLS1: ctx = SSL_CTX_new(TLSv1_method()); -#if HAVE_TLSv1_2 - else if (proto_version == PY_SSL_VERSION_TLS1_1) - ctx = SSL_CTX_new(TLSv1_1_method()); - else if (proto_version == PY_SSL_VERSION_TLS1_2) - ctx = SSL_CTX_new(TLSv1_2_method()); + break; #endif -#ifndef OPENSSL_NO_SSL3 - else if (proto_version == PY_SSL_VERSION_SSL3) - ctx = SSL_CTX_new(SSLv3_method()); +#if defined(TLS1_1_VERSION) && !defined(OPENSSL_NO_TLS1_1) + case PY_SSL_VERSION_TLS1_1: + ctx = SSL_CTX_new(TLSv1_1_method()); + break; #endif -#ifndef OPENSSL_NO_SSL2 - else if (proto_version == PY_SSL_VERSION_SSL2) - ctx = SSL_CTX_new(SSLv2_method()); +#if defined(TLS1_2_VERSION) && !defined(OPENSSL_NO_TLS1_2) + case PY_SSL_VERSION_TLS1_2: + ctx = SSL_CTX_new(TLSv1_2_method()); + break; #endif - else if (proto_version == PY_SSL_VERSION_TLS) /* SSLv23 */ + case PY_SSL_VERSION_TLS: + /* SSLv23 */ ctx = SSL_CTX_new(TLS_method()); - else if (proto_version == PY_SSL_VERSION_TLS_CLIENT) + break; + case PY_SSL_VERSION_TLS_CLIENT: ctx = SSL_CTX_new(TLS_client_method()); - else if (proto_version == PY_SSL_VERSION_TLS_SERVER) + break; + case PY_SSL_VERSION_TLS_SERVER: ctx = SSL_CTX_new(TLS_server_method()); - else + break; + default: proto_version = -1; + } PySSL_END_ALLOW_THREADS if (proto_version == -1) { 
PyErr_SetString(PyExc_ValueError, - "invalid protocol version"); + "invalid or unsupported protocol version"); return NULL; } if (ctx == NULL) { @@ -6055,12 +6052,10 @@ PyInit__ssl(void) PY_SSL_VERSION_TLS_SERVER); PyModule_AddIntConstant(m, "PROTOCOL_TLSv1", PY_SSL_VERSION_TLS1); -#if HAVE_TLSv1_2 PyModule_AddIntConstant(m, "PROTOCOL_TLSv1_1", PY_SSL_VERSION_TLS1_1); PyModule_AddIntConstant(m, "PROTOCOL_TLSv1_2", PY_SSL_VERSION_TLS1_2); -#endif /* protocol options */ PyModule_AddIntConstant(m, "OP_ALL", @@ -6068,10 +6063,8 @@ PyInit__ssl(void) PyModule_AddIntConstant(m, "OP_NO_SSLv2", SSL_OP_NO_SSLv2); PyModule_AddIntConstant(m, "OP_NO_SSLv3", SSL_OP_NO_SSLv3); PyModule_AddIntConstant(m, "OP_NO_TLSv1", SSL_OP_NO_TLSv1); -#if HAVE_TLSv1_2 PyModule_AddIntConstant(m, "OP_NO_TLSv1_1", SSL_OP_NO_TLSv1_1); PyModule_AddIntConstant(m, "OP_NO_TLSv1_2", SSL_OP_NO_TLSv1_2); -#endif #ifdef SSL_OP_NO_TLSv1_3 PyModule_AddIntConstant(m, "OP_NO_TLSv1_3", SSL_OP_NO_TLSv1_3); #else diff --git a/Tools/ssl/multissltests.py b/Tools/ssl/multissltests.py index 0e37ec1bba93b..12af98d12c45d 100755 --- a/Tools/ssl/multissltests.py +++ b/Tools/ssl/multissltests.py @@ -43,20 +43,21 @@ log = logging.getLogger("multissl") OPENSSL_OLD_VERSIONS = [ + "1.0.2u", + "1.1.0l", ] OPENSSL_RECENT_VERSIONS = [ - "1.0.2u", - "1.1.0l", "1.1.1g", # "3.0.0-alpha2" ] LIBRESSL_OLD_VERSIONS = [ + "2.9.2", ] LIBRESSL_RECENT_VERSIONS = [ - "2.9.2", + "3.1.0", ] # store files in ../multissl @@ -80,7 +81,7 @@ parser.add_argument( '--disable-ancient', action='store_true', - help="Don't test OpenSSL < 1.0.2 and LibreSSL < 2.5.3.", + help="Don't test OpenSSL and LibreSSL versions without upstream support", ) parser.add_argument( '--openssl', From webhook-mailer at python.org Sat May 16 05:39:13 2020 From: webhook-mailer at python.org (Batuhan Taskaya) Date: Sat, 16 May 2020 09:39:13 -0000 Subject: [Python-checkins] bpo-40192: Use thread_cputime for time.thread_time to improve resolution (GH-19381) Message-ID: 
https://github.com/python/cpython/commit/45410862321ae509e8753f239b0ea28fdcef5bad commit: 45410862321ae509e8753f239b0ea28fdcef5bad branch: master author: Batuhan Taskaya committer: GitHub date: 2020-05-16T11:39:09+02:00 summary: bpo-40192: Use thread_cputime for time.thread_time to improve resolution (GH-19381) On AIX, time.thread_time() is now implemented with thread_cputime() which has nanosecond resolution, rather than clock_gettime(CLOCK_THREAD_CPUTIME_ID) which has a resolution of 10 ms. files: A Misc/NEWS.d/next/Library/2020-04-05-04-16-14.bpo-40192.nk8uRJ.rst M Doc/whatsnew/3.9.rst M Modules/timemodule.c diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index c721a167440c3..cbddbb4f3f962 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -480,6 +480,14 @@ The :mod:`socket` module now exports the :data:`~socket.CAN_RAW_JOIN_FILTERS` constant on Linux 4.1 and greater. (Contributed by Stefan Tatschner and Zackery Spytz in :issue:`25780`.) +time +---- + +On AIX, :func:`~time.thread_time` is now implemented with ``thread_cputime()`` +which has nanosecond resolution, rather than +``clock_gettime(CLOCK_THREAD_CPUTIME_ID)`` which has a resolution of 10 ms. +(Contributed by Batuhan Taskaya in :issue:`40192`) + sys --- diff --git a/Misc/NEWS.d/next/Library/2020-04-05-04-16-14.bpo-40192.nk8uRJ.rst b/Misc/NEWS.d/next/Library/2020-04-05-04-16-14.bpo-40192.nk8uRJ.rst new file mode 100644 index 0000000000000..e1e7fcefe3f94 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-04-05-04-16-14.bpo-40192.nk8uRJ.rst @@ -0,0 +1,4 @@ +On AIX, :func:`~time.thread_time` is now implemented with ``thread_cputime()`` +which has nanosecond resolution, rather than +``clock_gettime(CLOCK_THREAD_CPUTIME_ID)`` which has a resolution of 10 ms. +Patch by Batuhan Taskaya. 
diff --git a/Modules/timemodule.c b/Modules/timemodule.c index a0e66ac170b21..8a4d149befb52 100644 --- a/Modules/timemodule.c +++ b/Modules/timemodule.c @@ -24,6 +24,10 @@ # include #endif +#if defined(_AIX) +# include +#endif + #if defined(__WATCOMC__) && !defined(__QNX__) # include #else @@ -1343,6 +1347,30 @@ _PyTime_GetThreadTimeWithInfo(_PyTime_t *tp, _Py_clock_info_t *info) return 0; } +#elif defined(_AIX) +#define HAVE_THREAD_TIME +static int +_PyTime_GetThreadTimeWithInfo(_PyTime_t *tp, _Py_clock_info_t *info) +{ + /* bpo-40192: On AIX, thread_cputime() is preferred: it has nanosecond + resolution, whereas clock_gettime(CLOCK_THREAD_CPUTIME_ID) + has a resolution of 10 ms. */ + thread_cputime_t tc; + if (thread_cputime(-1, &tc) != 0) { + PyErr_SetFromErrno(PyExc_OSError); + return -1; + } + + if (info) { + info->implementation = "thread_cputime()"; + info->monotonic = 1; + info->adjustable = 0; + info->resolution = 1e-9; + } + *tp = _PyTime_FromNanoseconds(tc.stime + tc.utime); + return 0; +} + #elif defined(HAVE_CLOCK_GETTIME) && defined(CLOCK_PROCESS_CPUTIME_ID) #define HAVE_THREAD_TIME static int From webhook-mailer at python.org Sat May 16 06:01:47 2020 From: webhook-mailer at python.org (Hai Shi) Date: Sat, 16 May 2020 10:01:47 -0000 Subject: [Python-checkins] bpo-40275: lazy import modules in test.support (GH-20128) Message-ID: https://github.com/python/cpython/commit/372fa3ead584876a975a61936b376259be636d27 commit: 372fa3ead584876a975a61936b376259be636d27 branch: master author: Hai Shi committer: GitHub date: 2020-05-16T03:01:39-07:00 summary: bpo-40275: lazy import modules in test.support (GH-20128) Automerge-Triggered-By: @vstinner files: M Lib/test/support/__init__.py diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index d2418282afc11..a9f9908c7fac9 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -6,23 +6,19 @@ import collections.abc import contextlib import errno -import faulthandler import 
fnmatch import functools -import gc import glob import importlib import importlib.util import os import platform import re -import shutil import stat import struct import subprocess import sys import sysconfig -import tempfile import _thread import threading import time @@ -439,6 +435,7 @@ def _longpath(path): _rmdir = os.rmdir def _rmtree(path): + import shutil try: shutil.rmtree(path) return @@ -891,6 +888,7 @@ def temp_dir(path=None, quiet=False): created, only a warning is issued. """ + import tempfile dir_created = False if path is None: path = tempfile.mkdtemp() @@ -1436,6 +1434,7 @@ def gc_collect(): longer than expected. This function tries its best to force all garbage objects to disappear. """ + import gc gc.collect() if is_jython: time.sleep(0.1) @@ -1444,6 +1443,7 @@ def gc_collect(): @contextlib.contextmanager def disable_gc(): + import gc have_gc = gc.isenabled() gc.disable() try: @@ -2136,6 +2136,7 @@ def reap_children(): @contextlib.contextmanager def start_threads(threads, unlock=None): + import faulthandler threads = list(threads) started = [] try: @@ -2401,6 +2402,7 @@ def call_link(self, *args, returncode=0): _can_xattr = None def can_xattr(): + import tempfile global _can_xattr if _can_xattr is not None: return _can_xattr @@ -2445,6 +2447,7 @@ def skip_if_pgo_task(test): def fs_is_case_insensitive(directory): """Detects if the file system for the specified directory is case-insensitive.""" + import tempfile with tempfile.NamedTemporaryFile(dir=directory) as base: base_path = base.name case_path = base_path.upper() @@ -2740,6 +2743,8 @@ def setswitchinterval(interval): @contextlib.contextmanager def disable_faulthandler(): + import faulthandler + # use sys.__stderr__ instead of sys.stderr, since regrtest replaces # sys.stderr with a StringIO which has no file descriptor when a test # is run with -W/--verbose3. 
From webhook-mailer at python.org Sat May 16 06:31:58 2020 From: webhook-mailer at python.org (Dong-hee Na) Date: Sat, 16 May 2020 10:31:58 -0000 Subject: [Python-checkins] bpo-39305: Update nntplib to merge nntplib.NNTP and nntplib._NNTPBase (GH-19817) Message-ID: https://github.com/python/cpython/commit/aa92a7cf210c98ad94229f282221136d846942db commit: aa92a7cf210c98ad94229f282221136d846942db branch: master author: Dong-hee Na committer: GitHub date: 2020-05-16T19:31:54+09:00 summary: bpo-39305: Update nntplib to merge nntplib.NNTP and nntplib._NNTPBase (GH-19817) files: A Misc/NEWS.d/next/Library/2020-05-01-00-22-58.bpo-39305.Cuwu_H.rst M Lib/nntplib.py M Lib/test/test_nntplib.py diff --git a/Lib/nntplib.py b/Lib/nntplib.py index aa9b46a8aaa39..f6e746e7c95c5 100644 --- a/Lib/nntplib.py +++ b/Lib/nntplib.py @@ -293,7 +293,7 @@ def _encrypt_on(sock, context, hostname): # The classes themselves -class _NNTPBase: +class NNTP: # UTF-8 is the character set for all NNTP commands and responses: they # are automatically encoded (when sending) and decoded (and receiving) # by this class. @@ -309,13 +309,18 @@ class _NNTPBase: encoding = 'utf-8' errors = 'surrogateescape' - def __init__(self, file, host, - readermode=None, timeout=_GLOBAL_DEFAULT_TIMEOUT): + def __init__(self, host, port=NNTP_PORT, user=None, password=None, + readermode=None, usenetrc=False, + timeout=_GLOBAL_DEFAULT_TIMEOUT): """Initialize an instance. Arguments: - - file: file-like object (open for read/write in binary mode) - - host: hostname of the server + - host: hostname to connect to + - port: port to connect to (default the standard NNTP port) + - user: username to authenticate with + - password: password to use with username - readermode: if true, send 'mode reader' command after connecting. 
+ - usenetrc: allow loading username and password from ~/.netrc file + if not specified explicitly - timeout: timeout (in seconds) used for socket connections readermode is sometimes necessary if you are connecting to an @@ -325,7 +330,24 @@ def __init__(self, file, host, readermode. """ self.host = host - self.file = file + self.port = port + self.sock = self._create_socket(timeout) + self.file = None + try: + self.file = self.sock.makefile("rwb") + self._base_init(readermode) + if user or usenetrc: + self.login(user, password, usenetrc) + except: + if self.file: + self.file.close() + self.sock.close() + raise + + def _base_init(self, readermode): + """Partial initialization for the NNTP protocol. + This instance method is extracted for supporting the test code. + """ self.debugging = 0 self.welcome = self._getresp() @@ -370,6 +392,12 @@ def __exit__(self, *args): if is_connected(): self._close() + def _create_socket(self, timeout): + if timeout is not None and not timeout: + raise ValueError('Non-blocking socket (timeout=0) is not supported') + sys.audit("nntplib.connect", self, self.host, self.port) + return socket.create_connection((self.host, self.port), timeout) + def getwelcome(self): """Get the welcome message from the server (this is read and squirreled away by __init__()). @@ -888,8 +916,12 @@ def ihave(self, message_id, data): return self._post('IHAVE {0}'.format(message_id), data) def _close(self): - self.file.close() - del self.file + try: + if self.file: + self.file.close() + del self.file + finally: + self.sock.close() def quit(self): """Process a QUIT command and close the socket. Returns: @@ -979,56 +1011,6 @@ def starttls(self, context=None): raise NNTPError("TLS failed to start.") -class NNTP(_NNTPBase): - - def __init__(self, host, port=NNTP_PORT, user=None, password=None, - readermode=None, usenetrc=False, - timeout=_GLOBAL_DEFAULT_TIMEOUT): - """Initialize an instance. 
Arguments: - - host: hostname to connect to - - port: port to connect to (default the standard NNTP port) - - user: username to authenticate with - - password: password to use with username - - readermode: if true, send 'mode reader' command after - connecting. - - usenetrc: allow loading username and password from ~/.netrc file - if not specified explicitly - - timeout: timeout (in seconds) used for socket connections - - readermode is sometimes necessary if you are connecting to an - NNTP server on the local machine and intend to call - reader-specific commands, such as `group'. If you get - unexpected NNTPPermanentErrors, you might need to set - readermode. - """ - self.host = host - self.port = port - self.sock = self._create_socket(timeout) - file = None - try: - file = self.sock.makefile("rwb") - super().__init__(file, host, readermode, timeout) - if user or usenetrc: - self.login(user, password, usenetrc) - except: - if file: - file.close() - self.sock.close() - raise - - def _create_socket(self, timeout): - if timeout is not None and not timeout: - raise ValueError('Non-blocking socket (timeout=0) is not supported') - sys.audit("nntplib.connect", self, self.host, self.port) - return socket.create_connection((self.host, self.port), timeout) - - def _close(self): - try: - super()._close() - finally: - self.sock.close() - - if _have_ssl: class NNTP_SSL(NNTP): diff --git a/Lib/test/test_nntplib.py b/Lib/test/test_nntplib.py index 8d296818e64f1..1df64fa7c6b00 100644 --- a/Lib/test/test_nntplib.py +++ b/Lib/test/test_nntplib.py @@ -5,6 +5,7 @@ import unittest import functools import contextlib +import nntplib import os.path import re import threading @@ -12,7 +13,6 @@ from test import support from test.support import socket_helper from nntplib import NNTP, GroupInfo -import nntplib from unittest.mock import patch try: import ssl @@ -411,6 +411,18 @@ def make_mock_file(handler): return (sio, file) +class NNTPServer(nntplib.NNTP): + + def __init__(self, f, host, 
readermode=None): + self.file = f + self.host = host + self._base_init(readermode) + + def _close(self): + self.file.close() + del self.file + + class MockedNNTPTestsMixin: # Override in derived classes handler_class = None @@ -426,7 +438,7 @@ def tearDown(self): def make_server(self, *args, **kwargs): self.handler = self.handler_class() self.sio, file = make_mock_file(self.handler) - self.server = nntplib._NNTPBase(file, 'test.server', *args, **kwargs) + self.server = NNTPServer(file, 'test.server', *args, **kwargs) return self.server diff --git a/Misc/NEWS.d/next/Library/2020-05-01-00-22-58.bpo-39305.Cuwu_H.rst b/Misc/NEWS.d/next/Library/2020-05-01-00-22-58.bpo-39305.Cuwu_H.rst new file mode 100644 index 0000000000000..7c6fdb3ede1c6 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-01-00-22-58.bpo-39305.Cuwu_H.rst @@ -0,0 +1,2 @@ +Update :mod:`nntplib` to merge :class:`nntplib.NNTP` and +:class:`nntplib._NNTPBase`. Patch by Dong-hee Na. From webhook-mailer at python.org Sat May 16 10:03:18 2020 From: webhook-mailer at python.org (Paul Ganssle) Date: Sat, 16 May 2020 14:03:18 -0000 Subject: [Python-checkins] bpo-24416: Return named tuple from date.isocalendar() (GH-20113) Message-ID: https://github.com/python/cpython/commit/1b97b9b0ad9a2ff8eb5c8f2e2e7c2aec1d13a330 commit: 1b97b9b0ad9a2ff8eb5c8f2e2e7c2aec1d13a330 branch: master author: Paul Ganssle committer: GitHub date: 2020-05-16T10:02:59-04:00 summary: bpo-24416: Return named tuple from date.isocalendar() (GH-20113) {date, datetime}.isocalendar() now return a private custom named tuple object IsoCalendarDate rather than a simple tuple. In order to leave IsocalendarDate as a private class and to improve what backwards compatibility is offered for pickling the result of a datetime.isocalendar() call, add a __reduce__ method to the named tuples that reduces them to plain tuples. (This is the part of this PR most likely to cause problems — 
if it causes major issues, switching to a strucseq or equivalent would be prudent). The pure python implementation of IsoCalendarDate uses positional-only arguments, since it is private and only constructed by position anyway; the equivalent change in the argument clinic on the C side would require us to move the forward declaration of the type above the clinic import for whatever reason, so it seems preferable to hold off on that for now. bpo-24416: https://bugs.python.org/issue24416 Original PR by Dong-hee Na with only minor alterations by Paul Ganssle. Co-authored-by: Dong-hee Na files: A Misc/NEWS.d/next/Library/2019-09-01-15-17-49.bpo-24416.G8Ww1U.rst M Doc/library/datetime.rst M Doc/whatsnew/3.9.rst M Lib/datetime.py M Lib/test/datetimetester.py M Modules/_datetimemodule.c M Modules/clinic/_datetimemodule.c.h diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst index 22ecbb551d897..4daf5df0efb11 100644 --- a/Doc/library/datetime.rst +++ b/Doc/library/datetime.rst @@ -670,7 +670,8 @@ Instance methods: .. method:: date.isocalendar() - Return a 3-tuple, (ISO year, ISO week number, ISO weekday). + Return a :term:`named tuple` object with three components: ``year``, + ``week`` and ``weekday``. The ISO calendar is a widely used variant of the Gregorian calendar. [#]_ @@ -682,11 +683,14 @@ Instance methods: For example, 2004 begins on a Thursday, so the first week of ISO year 2004 begins on Monday, 29 Dec 2003 and ends on Sunday, 4 Jan 2004:: - >>> from datetime import date - >>> date(2003, 12, 29).isocalendar() - (2004, 1, 1) - >>> date(2004, 1, 4).isocalendar() - (2004, 1, 7) + >>> from datetime import date + >>> date(2003, 12, 29).isocalendar() + datetime.IsoCalendarDate(year=2004, week=1, weekday=1) + >>> date(2004, 1, 4).isocalendar() + datetime.IsoCalendarDate(year=2004, week=1, weekday=7) + + .. versionchanged:: 3.9 + Result changed from a tuple to a :term:`named tuple`. .. method:: date.isoformat() @@ -1397,8 +1401,8 @@ Instance methods: .. 
method:: datetime.isocalendar() - Return a 3-tuple, (ISO year, ISO week number, ISO weekday). The same as - ``self.date().isocalendar()``. + Return a :term:`named tuple` with three components: ``year``, ``week`` + and ``weekday``. The same as ``self.date().isocalendar()``. .. method:: datetime.isoformat(sep='T', timespec='auto') diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index cbddbb4f3f962..bddb7102d90ee 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -281,6 +281,13 @@ Add :func:`curses.get_escdelay`, :func:`curses.set_escdelay`, :func:`curses.get_tabsize`, and :func:`curses.set_tabsize` functions. (Contributed by Anthony Sottile in :issue:`38312`.) +datetime +-------- +The :meth:`~datetime.date.isocalendar()` of :class:`datetime.date` +and :meth:`~datetime.datetime.isocalendar()` of :class:`datetime.datetime` +methods now returns a :func:`~collections.namedtuple` instead of a :class:`tuple`. +(Contributed by Dong-hee Na in :issue:`24416`.) + fcntl ----- diff --git a/Lib/datetime.py b/Lib/datetime.py index 67555191d02c1..952aebfdec0a7 100644 --- a/Lib/datetime.py +++ b/Lib/datetime.py @@ -1095,7 +1095,7 @@ def isoweekday(self): return self.toordinal() % 7 or 7 def isocalendar(self): - """Return a 3-tuple containing ISO year, week number, and weekday. + """Return a named tuple containing ISO year, week number, and weekday. The first ISO week of the year is the (Mon-Sun) week containing the year's first Thursday; everything else derives @@ -1120,7 +1120,7 @@ def isocalendar(self): if today >= _isoweek1monday(year+1): year += 1 week = 0 - return year, week+1, day+1 + return _IsoCalendarDate(year, week+1, day+1) # Pickle support. 
@@ -1210,6 +1210,36 @@ def __reduce__(self): else: return (self.__class__, args, state) + +class IsoCalendarDate(tuple): + + def __new__(cls, year, week, weekday, /): + return super().__new__(cls, (year, week, weekday)) + + @property + def year(self): + return self[0] + + @property + def week(self): + return self[1] + + @property + def weekday(self): + return self[2] + + def __reduce__(self): + # This code is intended to pickle the object without making the + # class public. See https://bugs.python.org/msg352381 + return (tuple, (tuple(self),)) + + def __repr__(self): + return (f'{self.__class__.__name__}' + f'(year={self[0]}, week={self[1]}, weekday={self[2]})') + + +_IsoCalendarDate = IsoCalendarDate +del IsoCalendarDate _tzinfo_class = tzinfo class time: @@ -1559,6 +1589,7 @@ def __reduce__(self): time.max = time(23, 59, 59, 999999) time.resolution = timedelta(microseconds=1) + class datetime(date): """datetime(year, month, day[, hour[, minute[, second[, microsecond[,tzinfo]]]]]) @@ -2514,7 +2545,7 @@ def _name_from_offset(delta): _format_time, _format_offset, _is_leap, _isoweek1monday, _math, _ord2ymd, _time, _time_class, _tzinfo_class, _wrap_strftime, _ymd2ord, _divide_and_round, _parse_isoformat_date, _parse_isoformat_time, - _parse_hh_mm_ss_ff) + _parse_hh_mm_ss_ff, _IsoCalendarDate) # XXX Since import * above excludes names that start with _, # docstring does not get overwritten. 
In the future, it may be # appropriate to maintain a single module level docstring and diff --git a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py index 42e2cecaeb724..a9741d6d4062f 100644 --- a/Lib/test/datetimetester.py +++ b/Lib/test/datetimetester.py @@ -2,6 +2,7 @@ See http://www.zope.org/Members/fdrake/DateTimeWiki/TestCases """ +import io import itertools import bisect import copy @@ -1355,19 +1356,43 @@ def test_weekday(self): def test_isocalendar(self): # Check examples from # http://www.phys.uu.nl/~vgent/calendar/isocalendar.htm - for i in range(7): - d = self.theclass(2003, 12, 22+i) - self.assertEqual(d.isocalendar(), (2003, 52, i+1)) - d = self.theclass(2003, 12, 29) + timedelta(i) - self.assertEqual(d.isocalendar(), (2004, 1, i+1)) - d = self.theclass(2004, 1, 5+i) - self.assertEqual(d.isocalendar(), (2004, 2, i+1)) - d = self.theclass(2009, 12, 21+i) - self.assertEqual(d.isocalendar(), (2009, 52, i+1)) - d = self.theclass(2009, 12, 28) + timedelta(i) - self.assertEqual(d.isocalendar(), (2009, 53, i+1)) - d = self.theclass(2010, 1, 4+i) - self.assertEqual(d.isocalendar(), (2010, 1, i+1)) + week_mondays = [ + ((2003, 12, 22), (2003, 52, 1)), + ((2003, 12, 29), (2004, 1, 1)), + ((2004, 1, 5), (2004, 2, 1)), + ((2009, 12, 21), (2009, 52, 1)), + ((2009, 12, 28), (2009, 53, 1)), + ((2010, 1, 4), (2010, 1, 1)), + ] + + test_cases = [] + for cal_date, iso_date in week_mondays: + base_date = self.theclass(*cal_date) + # Adds one test case for every day of the specified weeks + for i in range(7): + new_date = base_date + timedelta(i) + new_iso = iso_date[0:2] + (iso_date[2] + i,) + test_cases.append((new_date, new_iso)) + + for d, exp_iso in test_cases: + with self.subTest(d=d, comparison="tuple"): + self.assertEqual(d.isocalendar(), exp_iso) + + # Check that the tuple contents are accessible by field name + with self.subTest(d=d, comparison="fields"): + t = d.isocalendar() + self.assertEqual((t.year, t.week, t.weekday), exp_iso) + + def 
test_isocalendar_pickling(self): + """Test that the result of datetime.isocalendar() can be pickled. + + The result of a round trip should be a plain tuple. + """ + d = self.theclass(2019, 1, 1) + p = pickle.dumps(d.isocalendar()) + res = pickle.loads(p) + self.assertEqual(type(res), tuple) + self.assertEqual(res, (2019, 1, 2)) def test_iso_long_years(self): # Calculate long ISO years and compare to table from diff --git a/Misc/NEWS.d/next/Library/2019-09-01-15-17-49.bpo-24416.G8Ww1U.rst b/Misc/NEWS.d/next/Library/2019-09-01-15-17-49.bpo-24416.G8Ww1U.rst new file mode 100644 index 0000000000000..ee9af990f079d --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-09-01-15-17-49.bpo-24416.G8Ww1U.rst @@ -0,0 +1,3 @@ +The ``isocalendar()`` methods of :class:`datetime.date` and +:class:`datetime.datetime` now return a :term:`named tuple` +instead of a :class:`tuple`. diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c index 9bdc52e949718..7a5efd23b9e45 100644 --- a/Modules/_datetimemodule.c +++ b/Modules/_datetimemodule.c @@ -38,8 +38,9 @@ module datetime class datetime.datetime "PyDateTime_DateTime *" "&PyDateTime_DateTimeType" class datetime.date "PyDateTime_Date *" "&PyDateTime_DateType" +class datetime.IsoCalendarDate "PyDateTime_IsoCalendarDate *" "&PyDateTime_IsoCalendarDateType" [clinic start generated code]*/ -/*[clinic end generated code: output=da39a3ee5e6b4b0d input=25138ad6a696b785]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=81bec0fa19837f63]*/ #include "clinic/_datetimemodule.c.h" @@ -131,6 +132,7 @@ class datetime.date "PyDateTime_Date *" "&PyDateTime_DateType" static PyTypeObject PyDateTime_DateType; static PyTypeObject PyDateTime_DateTimeType; static PyTypeObject PyDateTime_DeltaType; +static PyTypeObject PyDateTime_IsoCalendarDateType; static PyTypeObject PyDateTime_TimeType; static PyTypeObject PyDateTime_TZInfoType; static PyTypeObject PyDateTime_TimeZoneType; @@ -3224,6 +3226,136 @@ date_isoweekday(PyDateTime_Date 
*self, PyObject *Py_UNUSED(ignored)) return PyLong_FromLong(dow + 1); } +PyDoc_STRVAR(iso_calendar_date__doc__, +"The result of date.isocalendar() or datetime.isocalendar()\n\n\ +This object may be accessed either as a tuple of\n\ + ((year, week, weekday)\n\ +or via the object attributes as named in the above tuple."); + +typedef struct { + PyTupleObject tuple; +} PyDateTime_IsoCalendarDate; + +static PyObject * +iso_calendar_date_repr(PyDateTime_IsoCalendarDate *self) +{ + PyObject* year = PyTuple_GetItem((PyObject *)self, 0); + if (year == NULL) { + return NULL; + } + PyObject* week = PyTuple_GetItem((PyObject *)self, 1); + if (week == NULL) { + return NULL; + } + PyObject* weekday = PyTuple_GetItem((PyObject *)self, 2); + if (weekday == NULL) { + return NULL; + } + + return PyUnicode_FromFormat("%.200s(year=%S, week=%S, weekday=%S)", + Py_TYPE(self)->tp_name, year, week, weekday); +} + +static PyObject * +iso_calendar_date_reduce(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + // Construct the tuple that this reduces to + PyObject * reduce_tuple = Py_BuildValue( + "O((OOO))", &PyTuple_Type, + PyTuple_GET_ITEM(self, 0), + PyTuple_GET_ITEM(self, 1), + PyTuple_GET_ITEM(self, 2) + ); + + return reduce_tuple; +} + +static PyObject * +iso_calendar_date_year(PyDateTime_IsoCalendarDate *self, void *unused) +{ + PyObject *year = PyTuple_GetItem((PyObject *)self, 0); + if (year == NULL) { + return NULL; + } + Py_INCREF(year); + return year; +} + +static PyObject * +iso_calendar_date_week(PyDateTime_IsoCalendarDate *self, void *unused) +{ + PyObject *week = PyTuple_GetItem((PyObject *)self, 1); + if (week == NULL) { + return NULL; + } + Py_INCREF(week); + return week; +} + +static PyObject * +iso_calendar_date_weekday(PyDateTime_IsoCalendarDate *self, void *unused) +{ + PyObject *weekday = PyTuple_GetItem((PyObject *)self, 2); + if (weekday == NULL) { + return NULL; + } + Py_INCREF(weekday); + return weekday; +} + +static PyGetSetDef iso_calendar_date_getset[] = { + 
{"year", (getter)iso_calendar_date_year}, + {"week", (getter)iso_calendar_date_week}, + {"weekday", (getter)iso_calendar_date_weekday}, + {NULL} +}; + +static PyMethodDef iso_calendar_date_methods[] = { + {"__reduce__", (PyCFunction)iso_calendar_date_reduce, METH_NOARGS, + PyDoc_STR("__reduce__() -> (cls, state)")}, + {NULL, NULL}, +}; + +static PyTypeObject PyDateTime_IsoCalendarDateType = { + PyVarObject_HEAD_INIT(NULL, 0) + .tp_name = "datetime.IsoCalendarDate", + .tp_basicsize = sizeof(PyDateTime_IsoCalendarDate), + .tp_repr = (reprfunc) iso_calendar_date_repr, + .tp_flags = Py_TPFLAGS_DEFAULT, + .tp_doc = iso_calendar_date__doc__, + .tp_methods = iso_calendar_date_methods, + .tp_getset = iso_calendar_date_getset, + .tp_base = &PyTuple_Type, + .tp_new = iso_calendar_date_new, +}; + +/*[clinic input] + at classmethod +datetime.IsoCalendarDate.__new__ as iso_calendar_date_new + year: int + week: int + weekday: int +[clinic start generated code]*/ + +static PyObject * +iso_calendar_date_new_impl(PyTypeObject *type, int year, int week, + int weekday) +/*[clinic end generated code: output=383d33d8dc7183a2 input=4f2c663c9d19c4ee]*/ + +{ + PyDateTime_IsoCalendarDate *self; + self = (PyDateTime_IsoCalendarDate *) type->tp_alloc(type, 3); + if (self == NULL) { + return NULL; + } + + PyTuple_SET_ITEM(self, 0, PyLong_FromLong(year)); + PyTuple_SET_ITEM(self, 1, PyLong_FromLong(week)); + PyTuple_SET_ITEM(self, 2, PyLong_FromLong(weekday)); + + return (PyObject *)self; +} + static PyObject * date_isocalendar(PyDateTime_Date *self, PyObject *Py_UNUSED(ignored)) { @@ -3243,7 +3375,13 @@ date_isocalendar(PyDateTime_Date *self, PyObject *Py_UNUSED(ignored)) ++year; week = 0; } - return Py_BuildValue("iii", year, week + 1, day + 1); + + PyObject* v = iso_calendar_date_new_impl(&PyDateTime_IsoCalendarDateType, + year, week + 1, day + 1); + if (v == NULL) { + return NULL; + } + return v; } /* Miscellaneous methods. 
*/ @@ -3382,7 +3520,7 @@ static PyMethodDef date_methods[] = { PyDoc_STR("Return time tuple, compatible with time.localtime().")}, {"isocalendar", (PyCFunction)date_isocalendar, METH_NOARGS, - PyDoc_STR("Return a 3-tuple containing ISO year, week number, and " + PyDoc_STR("Return a named tuple containing ISO year, week number, and " "weekday.")}, {"isoformat", (PyCFunction)date_isoformat, METH_NOARGS, @@ -6386,13 +6524,14 @@ PyInit__datetime(void) if (m == NULL) return NULL; + PyTypeObject *types[] = { &PyDateTime_DateType, &PyDateTime_DateTimeType, &PyDateTime_TimeType, &PyDateTime_DeltaType, &PyDateTime_TZInfoType, - &PyDateTime_TimeZoneType + &PyDateTime_TimeZoneType, }; for (size_t i = 0; i < Py_ARRAY_LENGTH(types); i++) { @@ -6401,6 +6540,11 @@ PyInit__datetime(void) } } + if (PyType_Ready(&PyDateTime_IsoCalendarDateType) < 0) { + return NULL; + } + Py_INCREF(&PyDateTime_IsoCalendarDateType); + /* timedelta values */ d = PyDateTime_DeltaType.tp_dict; diff --git a/Modules/clinic/_datetimemodule.c.h b/Modules/clinic/_datetimemodule.c.h index 447036ca03814..973a4ea025347 100644 --- a/Modules/clinic/_datetimemodule.c.h +++ b/Modules/clinic/_datetimemodule.c.h @@ -14,6 +14,60 @@ PyDoc_STRVAR(datetime_date_fromtimestamp__doc__, #define DATETIME_DATE_FROMTIMESTAMP_METHODDEF \ {"fromtimestamp", (PyCFunction)datetime_date_fromtimestamp, METH_O|METH_CLASS, datetime_date_fromtimestamp__doc__}, +static PyObject * +iso_calendar_date_new_impl(PyTypeObject *type, int year, int week, + int weekday); + +static PyObject * +iso_calendar_date_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) +{ + PyObject *return_value = NULL; + static const char * const _keywords[] = {"year", "week", "weekday", NULL}; + static _PyArg_Parser _parser = {NULL, _keywords, "IsoCalendarDate", 0}; + PyObject *argsbuf[3]; + PyObject * const *fastargs; + Py_ssize_t nargs = PyTuple_GET_SIZE(args); + int year; + int week; + int weekday; + + fastargs = 
_PyArg_UnpackKeywords(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser, 3, 3, 0, argsbuf); + if (!fastargs) { + goto exit; + } + if (PyFloat_Check(fastargs[0])) { + PyErr_SetString(PyExc_TypeError, + "integer argument expected, got float" ); + goto exit; + } + year = _PyLong_AsInt(fastargs[0]); + if (year == -1 && PyErr_Occurred()) { + goto exit; + } + if (PyFloat_Check(fastargs[1])) { + PyErr_SetString(PyExc_TypeError, + "integer argument expected, got float" ); + goto exit; + } + week = _PyLong_AsInt(fastargs[1]); + if (week == -1 && PyErr_Occurred()) { + goto exit; + } + if (PyFloat_Check(fastargs[2])) { + PyErr_SetString(PyExc_TypeError, + "integer argument expected, got float" ); + goto exit; + } + weekday = _PyLong_AsInt(fastargs[2]); + if (weekday == -1 && PyErr_Occurred()) { + goto exit; + } + return_value = iso_calendar_date_new_impl(type, year, week, weekday); + +exit: + return return_value; +} + PyDoc_STRVAR(datetime_datetime_now__doc__, "now($type, /, tz=None)\n" "--\n" @@ -55,4 +109,4 @@ datetime_datetime_now(PyTypeObject *type, PyObject *const *args, Py_ssize_t narg exit: return return_value; } -/*[clinic end generated code: output=aae916ab728ca85b input=a9049054013a1b77]*/ +/*[clinic end generated code: output=5e17549f29a439a5 input=a9049054013a1b77]*/ From webhook-mailer at python.org Sat May 16 12:15:07 2020 From: webhook-mailer at python.org (Paul Ganssle) Date: Sat, 16 May 2020 16:15:07 -0000 Subject: [Python-checkins] bpo-40503: Add documentation and what's new entry for zoneinfo (GH-20006) Message-ID: https://github.com/python/cpython/commit/b17e49e0def23238b9e7f48c8a02e2d7bbf1f653 commit: b17e49e0def23238b9e7f48c8a02e2d7bbf1f653 branch: master author: Paul Ganssle committer: GitHub date: 2020-05-16T12:14:58-04:00 summary: bpo-40503: Add documentation and what's new entry for zoneinfo (GH-20006) This adds the documentation for the `zoneinfo` module added in PEP 615: https://www.python.org/dev/peps/pep-0615/ The implementation itself 
was GH-19909: https://github.com/python/cpython/pull/19909 bpo-40503: https://bugs.python.org/issue40503 Co-authored-by: Victor Stinner files: A Doc/library/zoneinfo.rst A Misc/NEWS.d/next/Library/2020-05-08-15-48-39.bpo-40503.elZyxc.rst M Doc/library/datatypes.rst M Doc/whatsnew/3.9.rst diff --git a/Doc/library/datatypes.rst b/Doc/library/datatypes.rst index 94010c0e391b0..675bbb6fafdca 100644 --- a/Doc/library/datatypes.rst +++ b/Doc/library/datatypes.rst @@ -20,6 +20,7 @@ The following modules are documented in this chapter: .. toctree:: datetime.rst + zoneinfo.rst calendar.rst collections.rst collections.abc.rst diff --git a/Doc/library/zoneinfo.rst b/Doc/library/zoneinfo.rst new file mode 100644 index 0000000000000..d8e2796c7f631 --- /dev/null +++ b/Doc/library/zoneinfo.rst @@ -0,0 +1,390 @@ +:mod:`zoneinfo` --- IANA time zone support +========================================== + +.. module:: zoneinfo + :synopsis: IANA time zone support + +.. versionadded:: 3.9 + +.. moduleauthor:: Paul Ganssle +.. sectionauthor:: Paul Ganssle + +-------------- + +The :mod:`zoneinfo` module provides a concrete time zone implementation to +support the IANA time zone database as originally specified in :pep:`615`. By +default, :mod:`zoneinfo` uses the system's time zone data if available; if no +system time zone data is available, the library will fall back to using the +first-party `tzdata`_ package available on PyPI. + +.. seealso:: + + Module: :mod:`datetime` + Provides the :class:`~datetime.time` and :class:`~datetime.datetime` + types with which the :class:`ZoneInfo` class is designed to be used. + + Package `tzdata`_ + First-party package maintained by the CPython core developers to supply + time zone data via PyPI. 
+ + +Using ``ZoneInfo`` +------------------ + +:class:`ZoneInfo` is a concrete implementation of the :class:`datetime.tzinfo` +abstract base class, and is intended to be attached to ``tzinfo``, either via +the constructor, the :meth:`datetime.replace ` +method or :meth:`datetime.astimezone `:: + + >>> from zoneinfo import ZoneInfo + >>> from datetime import datetime, timedelta + + >>> dt = datetime(2020, 10, 31, 12, tzinfo=ZoneInfo("America/Los_Angeles")) + >>> print(dt) + 2020-10-31 12:00:00-07:00 + + >>> dt.tzname() + 'PDT' + +Datetimes constructed in this way are compatible with datetime arithmetic and +handle daylight saving time transitions with no further intervention:: + + >>> dt_add = dt + timedelta(days=1) + + >>> print(dt_add) + 2020-11-01 12:00:00-08:00 + + >>> dt_add.tzname() + 'PST' + +These time zones also support the :attr:`~datetime.datetime.fold` attribute +introduced in :pep:`495`. During offset transitions which induce ambiguous +times (such as a daylight saving time to standard time transition), the offset +from *before* the transition is used when ``fold=0``, and the offset *after* +the transition is used when ``fold=1``, for example:: + + >>> dt = datetime(2020, 11, 1, 1, tzinfo=ZoneInfo("America/Los_Angeles")) + >>> print(dt) + 2020-11-01 01:00:00-07:00 + + >>> print(dt.replace(fold=1)) + 2020-11-01 01:00:00-08:00 + +When converting from another time zone, the fold will be set to the correct +value:: + + >>> from datetime import timezone + >>> LOS_ANGELES = ZoneInfo("America/Los_Angeles") + >>> dt_utc = datetime(2020, 11, 1, 8, tzinfo=timezone.utc) + + >>> # Before the PDT -> PST transition + >>> print(dt_utc.astimezone(LOS_ANGELES)) + 2020-11-01 01:00:00-07:00 + + >>> # After the PDT -> PST transition + >>> print((dt_utc + timedelta(hours=1)).astimezone(LOS_ANGELES)) + 2020-11-01 01:00:00-08:00 + +Data sources +------------ + +The ``zoneinfo`` module does not directly provide time zone data, and instead +pulls time zone information from the 
system time zone database or the +first-party PyPI package `tzdata`_, if available. Some systems, including +notably Windows systems, do not have an IANA database available, and so for +projects targeting cross-platform compatibility that require time zone data, it +is recommended to declare a dependency on tzdata. If neither system data nor +tzdata are available, all calls to :class:`ZoneInfo` will raise +:exc:`ZoneInfoNotFoundError`. + +.. _zoneinfo_data_configuration: + +Configuring the data sources +**************************** + +When ``ZoneInfo(key)`` is called, the constructor first searches the +directories specified in :data:`TZPATH` for a file matching ``key``, and on +failure looks for a match in the tzdata package. This behavior can be +configured in three ways: + +1. The default :data:`TZPATH` when not otherwise specified can be configured at + :ref:`compile time `. +2. :data:`TZPATH` can be configured using :ref:`an environment variable + `. +3. At :ref:`runtime `, the search path can be + manipulated using the :func:`reset_tzpath` function. + +.. _zoneinfo_data_compile_time_config: + +Compile-time configuration +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The default :data:`TZPATH` includes several common deployment locations for the +time zone database (except on Windows, where there are no "well-known" +locations for time zone data). On POSIX systems, downstream distributors and +those building Python from source who know where their system +time zone data is deployed may change the default time zone path by specifying +the compile-time option ``TZPATH`` (or, more likely, the ``configure`` flag +``--with-tzpath``), which should be a string delimited by :data:`os.pathsep`. + +On all platforms, the configured value is available as the ``TZPATH`` key in +:func:`sysconfig.get_config_var`. + +.. 
_zoneinfo_data_environment_var: + +Environment configuration +^^^^^^^^^^^^^^^^^^^^^^^^^ + +When initializing :data:`TZPATH` (either at import time or whenever +:func:`reset_tzpath` is called with no arguments), the ``zoneinfo`` module will +use the environment variable ``PYTHONTZPATH``, if it exists, to set the search +path. + +.. envvar:: PYTHONTZPATH + + This is an :data:`os.pathsep`-separated string containing the time zone + search path to use. It must consist of only absolute rather than relative + paths. Relative components specified in ``PYTHONTZPATH`` will not be used, + but otherwise the behavior when a relative path is specified is + implementation-defined; CPython will raise :exc:`InvalidTZPathWarning`, but + other implementations are free to silently ignore the erroneous component + or raise an exception. + +To set the system to ignore the system data and use the tzdata package +instead, set ``PYTHONTZPATH=""``. + +.. _zoneinfo_data_runtime_config: + +Runtime configuration +^^^^^^^^^^^^^^^^^^^^^ + +The TZ search path can also be configured at runtime using the +:func:`reset_tzpath` function. This is generally not an advisable operation, +though it is reasonable to use it in test functions that require the use of a +specific time zone path (or require disabling access to the system time zones). + + +The ``ZoneInfo`` class +---------------------- + +.. class:: ZoneInfo(key) + + A concrete :class:`datetime.tzinfo` subclass that represents an IANA time + zone specified by the string ``key``. Calls to the primary constructor will + always return objects that compare identically; put another way, barring + cache invalidation via :meth:`ZoneInfo.clear_cache`, for all values of + ``key``, the following assertion will always be true: + + .. code-block:: python + + a = ZoneInfo(key) + b = ZoneInfo(key) + assert a is b + + ``key`` must be in the form of a relative, normalized POSIX path, with no + up-level references. 
The constructor will raise :exc:`ValueError` if a + non-conforming key is passed. + + If no file matching ``key`` is found, the constructor will raise + :exc:`ZoneInfoNotFoundError`. + + +The ``ZoneInfo`` class has two alternate constructors: + +.. classmethod:: ZoneInfo.from_file(fobj, /, key=None) + + Constructs a ``ZoneInfo`` object from a file-like object returning bytes + (e.g. a file opened in binary mode or an :class:`io.BytesIO` object). + Unlike the primary constructor, this always constructs a new object. + + The ``key`` parameter sets the name of the zone for the purposes of + :py:meth:`~object.__str__` and :py:meth:`~object.__repr__`. + + Objects created via this constructor cannot be pickled (see `pickling`_). + +.. classmethod:: ZoneInfo.no_cache(key) + + An alternate constructor that bypasses the constructor's cache. It is + identical to the primary constructor, but returns a new object on each + call. This is most likely to be useful for testing or demonstration + purposes, but it can also be used to create a system with a different cache + invalidation strategy. + + Objects created via this constructor will also bypass the cache of a + deserializing process when unpickled. + + .. TODO: Add "See `cache_behavior`_" reference when that section is ready. + + .. caution:: + + Using this constructor may change the semantics of your datetimes in + surprising ways, only use it if you know that you need to. + +The following class methods are also available: + +.. classmethod:: ZoneInfo.clear_cache(*, only_keys=None) + + A method for invalidating the cache on the ``ZoneInfo`` class. If no + arguments are passed, all caches are invalidated and the next call to + the primary constructor for each key will return a new instance. + + If an iterable of key names is passed to the ``only_keys`` parameter, only + the specified keys will be removed from the cache. Keys passed to + ``only_keys`` but not found in the cache are ignored. + + .. 
TODO: Add "See `cache_behavior`_" reference when that section is ready. + + .. warning:: + + Invoking this function may change the semantics of datetimes using + ``ZoneInfo`` in surprising ways; this modifies process-wide global state + and thus may have wide-ranging effects. Only use it if you know that you + need to. + +The class has one attribute: + +.. attribute:: ZoneInfo.key + + This is a read-only :term:`attribute` that returns the value of ``key`` + passed to the constructor, which should be a lookup key in the IANA time + zone database (e.g. ``America/New_York``, ``Europe/Paris`` or + ``Asia/Tokyo``). + + For zones constructed from file without specifying a ``key`` parameter, + this will be set to ``None``. + + .. note:: + + Although it is a somewhat common practice to expose these to end users, + these values are designed to be primary keys for representing the + relevant zones and not necessarily user-facing elements. Projects like + CLDR (the Unicode Common Locale Data Repository) can be used to get + more user-friendly strings from these keys. + +String representations +********************** + +The string representation returned when calling :py:class:`str` on a +:class:`ZoneInfo` object defaults to using the :attr:`ZoneInfo.key` attribute (see +the note on usage in the attribute documentation):: + + >>> zone = ZoneInfo("Pacific/Kwajalein") + >>> str(zone) + 'Pacific/Kwajalein' + + >>> dt = datetime(2020, 4, 1, 3, 15, tzinfo=zone) + >>> f"{dt.isoformat()} [{dt.tzinfo}]" + '2020-04-01T03:15:00+12:00 [Pacific/Kwajalein]' + +For objects constructed from a file without specifying a ``key`` parameter, +``str`` falls back to calling :func:`repr`. ``ZoneInfo``'s ``repr`` is +implementation-defined and not necessarily stable between versions, but it is +guaranteed not to be a valid ``ZoneInfo`` key. + +.. 
_pickling: + +Pickle serialization +******************** + +Rather than serializing all transition data, ``ZoneInfo`` objects are +serialized by key, and ``ZoneInfo`` objects constructed from files (even those +with a value for ``key`` specified) cannot be pickled. + +The behavior of a ``ZoneInfo`` file depends on how it was constructed: + +1. ``ZoneInfo(key)``: When constructed with the primary constructor, a + ``ZoneInfo`` object is serialized by key, and when deserialized, the + deserializing process uses the primary and thus it is expected that these + are expected to be the same object as other references to the same time + zone. For example, if ``europe_berlin_pkl`` is a string containing a pickle + constructed from ``ZoneInfo("Europe/Berlin")``, one would expect the + following behavior: + + .. code-block:: + + >>> a = ZoneInfo("Europe/Berlin") + >>> b = pickle.loads(europe_berlin_pkl) + >>> a is b + True + +2. ``ZoneInfo.no_cache(key)``: When constructed from the cache-bypassing + constructor, the ``ZoneInfo`` object is also serialized by key, but when + deserialized, the deserializing process uses the cache bypassing + constructor. If ``europe_berlin_pkl_nc`` is a string containing a pickle + constructed from ``ZoneInfo.no_cache("Europe/Berlin")``, one would expect + the following behavior: + + .. code-block:: + + >>> a = ZoneInfo("Europe/Berlin") + >>> b = pickle.loads(europe_berlin_pkl_nc) + >>> a is b + False + +3. ``ZoneInfo.from_file(fobj, /, key=None)``: When constructed from a file, the + ``ZoneInfo`` object raises an exception on pickling. If an end user wants to + pickle a ``ZoneInfo`` constructed from a file, it is recommended that they + use a wrapper type or a custom serialization function: either serializing by + key or storing the contents of the file object and serializing that. 
+ +This method of serialization requires that the time zone data for the required +key be available on both the serializing and deserializing side, similar to the +way that references to classes and functions are expected to exist in both the +serializing and deserializing environments. It also means that no guarantees +are made about the consistency of results when unpickling a ``ZoneInfo`` +pickled in an environment with a different version of the time zone data. + +Functions +--------- + +.. function:: reset_tzpath(to=None) + + Sets or resets the time zone search path (:data:`TZPATH`) for the module. + When called with no arguments, :data:`TZPATH` is set to the default value. + + Calling ``reset_tzpath`` will not invalidate the :class:`ZoneInfo` cache, + and so calls to the primary ``ZoneInfo`` constructor will only use the new + ``TZPATH`` in the case of a cache miss. + + The ``to`` parameter must be a :term:`sequence` of strings or + :class:`os.PathLike` and not a string, all of which must be absolute paths. + :exc:`ValueError` will be raised if something other than an absolute path + is passed. + +Globals +------- + +.. data:: TZPATH + + A read-only sequence representing the time zone search path -- when + constructing a ``ZoneInfo`` from a key, the key is joined to each entry in + the ``TZPATH``, and the first file found is used. + + ``TZPATH`` may contain only absolute paths, never relative paths, + regardless of how it is configured. + + The object that ``zoneinfo.TZPATH`` points to may change in response to a + call to :func:`reset_tzpath`, so it is recommended to use + ``zoneinfo.TZPATH`` rather than importing ``TZPATH`` from ``zoneinfo`` or + assigning a long-lived variable to ``zoneinfo.TZPATH``. + + For more information on configuring the time zone search path, see + :ref:`zoneinfo_data_configuration`. + +Exceptions and warnings +----------------------- + +.. 
exception:: ZoneInfoNotFoundError + + Raised when construction of a :class:`ZoneInfo` object fails because the + specified key could not be found on the system. This is a subclass of + :exc:`KeyError`. + +.. exception:: InvalidTZPathWarning + + Raised when :envvar:`PYTHONTZPATH` contains an invalid component that will + be filtered out, such as a relative path. + +.. Links and references: + +.. _tzdata: https://pypi.org/project/tzdata/ diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index bddb7102d90ee..479c33b4a7fa1 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -205,7 +205,44 @@ Other Language Changes New Modules =========== -* None yet. +zoneinfo +-------- + +The :mod:`zoneinfo` module brings support for the IANA time zone database to +the standard library. It adds :class:`zoneinfo.ZoneInfo`, a concrete +:class:`datetime.tzinfo` implementation backed by the system's time zone data. + +Example:: + + >>> from zoneinfo import ZoneInfo + >>> from datetime import datetime, timedelta + + >>> # Daylight saving time + >>> dt = datetime(2020, 10, 31, 12, tzinfo=ZoneInfo("America/Los_Angeles")) + >>> print(dt) + 2020-10-31 12:00:00-07:00 + >>> dt.tzname() + 'PDT' + + >>> # Standard time + >>> dt += timedelta(days=7) + >>> print(dt) + 2020-11-07 12:00:00-08:00 + >>> print(dt.tzname()) + PST + + +As a fall-back source of data for platforms that don't ship the IANA database, +the |tzdata|_ module was released as a first-party package -- distributed via +PyPI and maintained by the CPython core team. + +.. |tzdata| replace:: ``tzdata`` +.. _tzdata: https://pypi.org/project/tzdata/ + +.. 
seealso:: + + :pep:`615` -- Support for the IANA Time Zone Database in the Standard Library + PEP written and implemented by Paul Ganssle Improved Modules diff --git a/Misc/NEWS.d/next/Library/2020-05-08-15-48-39.bpo-40503.elZyxc.rst b/Misc/NEWS.d/next/Library/2020-05-08-15-48-39.bpo-40503.elZyxc.rst new file mode 100644 index 0000000000000..d68797a738c2c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-08-15-48-39.bpo-40503.elZyxc.rst @@ -0,0 +1 @@ +:pep:`615`, the :mod:`zoneinfo` module. Adds support for the IANA time zone database. From webhook-mailer at python.org Sat May 16 16:27:11 2020 From: webhook-mailer at python.org (Christian Heimes) Date: Sat, 16 May 2020 20:27:11 -0000 Subject: [Python-checkins] bpo-37630: Use SHA3 and SHAKE XOF from OpenSSL (GH-16049) Message-ID: https://github.com/python/cpython/commit/d5b3f6b7f9fc74438009af63f1de01bd77be9385 commit: d5b3f6b7f9fc74438009af63f1de01bd77be9385 branch: master author: Christian Heimes committer: GitHub date: 2020-05-16T13:27:06-07:00 summary: bpo-37630: Use SHA3 and SHAKE XOF from OpenSSL (GH-16049) OpenSSL 1.1.1 comes with SHA3 and SHAKE builtin. Signed-off-by: Christian Heimes Automerge-Triggered-By: @tiran files: A Misc/NEWS.d/next/Library/2020-05-15-19-53-18.bpo-37630.O5kgAw.rst M Doc/library/hashlib.rst M Lib/hashlib.py M Lib/test/test_hashlib.py M Modules/_hashopenssl.c M Modules/clinic/_hashopenssl.c.h diff --git a/Doc/library/hashlib.rst b/Doc/library/hashlib.rst index b737d22d4100c..d644974e66098 100644 --- a/Doc/library/hashlib.rst +++ b/Doc/library/hashlib.rst @@ -87,6 +87,8 @@ library that Python uses on your platform. On most platforms the that the hashing algorithm is not used in a security context, e.g. as a non-cryptographic one-way compression function. + Hashlib now uses SHA3 and SHAKE from OpenSSL 1.1.1 and newer. 
+ For example, to obtain the digest of the byte string ``b'Nobody inspects the spammish repetition'``:: diff --git a/Lib/hashlib.py b/Lib/hashlib.py index 56873b7278b6a..0f81de094ca6e 100644 --- a/Lib/hashlib.py +++ b/Lib/hashlib.py @@ -71,8 +71,6 @@ __builtin_constructor_cache = {} __block_openssl_constructor = { - 'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512', - 'shake_128', 'shake_256', 'blake2b', 'blake2s', } @@ -125,6 +123,8 @@ def __get_openssl_constructor(name): # Prefer our blake2 and sha3 implementation. return __get_builtin_constructor(name) try: + # MD5, SHA1, and SHA2 are in all supported OpenSSL versions + # SHA3/shake are available in OpenSSL 1.1.1+ f = getattr(_hashlib, 'openssl_' + name) # Allow the C module to raise ValueError. The function will be # defined but the hash not actually available thanks to OpenSSL. diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py index f9fe7e37920a3..b901468db38b1 100644 --- a/Lib/test/test_hashlib.py +++ b/Lib/test/test_hashlib.py @@ -27,9 +27,10 @@ py_hashlib = import_fresh_module('hashlib', blocked=['_hashlib']) try: - from _hashlib import HASH + from _hashlib import HASH, HASHXOF except ImportError: HASH = None + HASHXOF = None try: import _blake2 @@ -254,6 +255,9 @@ def test_digest_length_overflow(self): h = cons() if h.name not in self.shakes: continue + if HASH is not None and isinstance(h, HASH): + # _hashopenssl's take a size_t + continue for digest in h.digest, h.hexdigest: self.assertRaises(ValueError, digest, -10) for length in large_sizes: @@ -860,6 +864,18 @@ def hash_in_chunks(chunk_size): def test_get_fips_mode(self): self.assertIsInstance(c_hashlib.get_fips_mode(), int) + @unittest.skipUnless(HASH is not None, 'need _hashlib') + def test_internal_types(self): + # internal types like _hashlib.HASH are not constructable + with self.assertRaisesRegex( + TypeError, "cannot create 'HASH' instance" + ): + HASH() + with self.assertRaisesRegex( + TypeError, "cannot create 'HASHXOF' instance" 
+ ): + HASHXOF() + class KDFTests(unittest.TestCase): diff --git a/Misc/NEWS.d/next/Library/2020-05-15-19-53-18.bpo-37630.O5kgAw.rst b/Misc/NEWS.d/next/Library/2020-05-15-19-53-18.bpo-37630.O5kgAw.rst new file mode 100644 index 0000000000000..78458e6d1a46b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-15-19-53-18.bpo-37630.O5kgAw.rst @@ -0,0 +1,2 @@ +The :mod:`hashlib` module can now use SHA3 hashes and SHAKE XOF from OpenSSL +when available. diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c index b55ac93f2b076..936b515addbc1 100644 --- a/Modules/_hashopenssl.c +++ b/Modules/_hashopenssl.c @@ -55,6 +55,9 @@ static PyModuleDef _hashlibmodule; typedef struct { PyTypeObject *EVPtype; +#ifdef PY_OPENSSL_HAS_SHAKE + PyTypeObject *EVPXOFtype; +#endif } _hashlibstate; static inline _hashlibstate* @@ -79,8 +82,9 @@ typedef struct { /*[clinic input] module _hashlib class _hashlib.HASH "EVPobject *" "((_hashlibstate *)PyModule_GetState(module))->EVPtype" +class _hashlib.HASHXOF "EVPobject *" "((_hashlibstate *)PyModule_GetState(module))->EVPXOFtype" [clinic start generated code]*/ -/*[clinic end generated code: output=da39a3ee5e6b4b0d input=1adf85e8eb2ab979]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=813acc7b2d8f322c]*/ /* LCOV_EXCL_START */ @@ -114,6 +118,15 @@ _setException(PyObject *exc) } /* LCOV_EXCL_STOP */ +/* {Py_tp_new, NULL} doesn't block __new__ */ +static PyObject * +_disabled_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) +{ + PyErr_Format(PyExc_TypeError, + "cannot create '%.100s' instances", _PyType_Name(type)); + return NULL; +} + static PyObject* py_digest_name(const EVP_MD *md) { @@ -249,11 +262,9 @@ py_digest_by_name(const char *name) } static EVPobject * -newEVPobject(void) +newEVPobject(PyTypeObject *type) { - EVPobject *retval = (EVPobject *)PyObject_New( - EVPobject, _hashlibstate_global->EVPtype - ); + EVPobject *retval = (EVPobject *)PyObject_New(EVPobject, type); if (retval == NULL) { return NULL; } 
@@ -327,7 +338,7 @@ EVP_copy_impl(EVPobject *self) { EVPobject *newobj; - if ( (newobj = newEVPobject())==NULL) + if ((newobj = newEVPobject(Py_TYPE(self))) == NULL) return NULL; if (!locked_EVP_MD_CTX_copy(newobj->ctx, self)) { @@ -502,7 +513,8 @@ EVP_repr(EVPobject *self) if (!name_obj) { return NULL; } - repr = PyUnicode_FromFormat("<%U HASH object @ %p>", name_obj, self); + repr = PyUnicode_FromFormat("<%U %s object @ %p>", + name_obj, Py_TYPE(self)->tp_name, self); Py_DECREF(name_obj); return repr; } @@ -531,6 +543,7 @@ static PyType_Slot EVPtype_slots[] = { {Py_tp_doc, (char *)hashtype_doc}, {Py_tp_methods, EVP_methods}, {Py_tp_getset, EVP_getseters}, + {Py_tp_new, _disabled_new}, {0, 0}, }; @@ -542,19 +555,179 @@ static PyType_Spec EVPtype_spec = { EVPtype_slots }; +#ifdef PY_OPENSSL_HAS_SHAKE + +/*[clinic input] +_hashlib.HASHXOF.digest as EVPXOF_digest + + length: Py_ssize_t + +Return the digest value as a bytes object. +[clinic start generated code]*/ + +static PyObject * +EVPXOF_digest_impl(EVPobject *self, Py_ssize_t length) +/*[clinic end generated code: output=ef9320c23280efad input=816a6537cea3d1db]*/ +{ + EVP_MD_CTX *temp_ctx; + PyObject *retval = PyBytes_FromStringAndSize(NULL, length); + + if (retval == NULL) { + return NULL; + } + + temp_ctx = EVP_MD_CTX_new(); + if (temp_ctx == NULL) { + Py_DECREF(retval); + PyErr_NoMemory(); + return NULL; + } + + if (!locked_EVP_MD_CTX_copy(temp_ctx, self)) { + Py_DECREF(retval); + EVP_MD_CTX_free(temp_ctx); + return _setException(PyExc_ValueError); + } + if (!EVP_DigestFinalXOF(temp_ctx, + (unsigned char*)PyBytes_AS_STRING(retval), + length)) { + Py_DECREF(retval); + EVP_MD_CTX_free(temp_ctx); + _setException(PyExc_ValueError); + return NULL; + } + + EVP_MD_CTX_free(temp_ctx); + return retval; +} + +/*[clinic input] +_hashlib.HASHXOF.hexdigest as EVPXOF_hexdigest + + length: Py_ssize_t + +Return the digest value as a string of hexadecimal digits. 
+[clinic start generated code]*/ + +static PyObject * +EVPXOF_hexdigest_impl(EVPobject *self, Py_ssize_t length) +/*[clinic end generated code: output=eb3e6ee7788bf5b2 input=5f9d6a8f269e34df]*/ +{ + unsigned char *digest; + EVP_MD_CTX *temp_ctx; + PyObject *retval; + + digest = (unsigned char*)PyMem_Malloc(length); + if (digest == NULL) { + PyErr_NoMemory(); + return NULL; + } + + temp_ctx = EVP_MD_CTX_new(); + if (temp_ctx == NULL) { + PyMem_Free(digest); + PyErr_NoMemory(); + return NULL; + } + + /* Get the raw (binary) digest value */ + if (!locked_EVP_MD_CTX_copy(temp_ctx, self)) { + PyMem_Free(digest); + EVP_MD_CTX_free(temp_ctx); + return _setException(PyExc_ValueError); + } + if (!EVP_DigestFinalXOF(temp_ctx, digest, length)) { + PyMem_Free(digest); + EVP_MD_CTX_free(temp_ctx); + _setException(PyExc_ValueError); + return NULL; + } + + EVP_MD_CTX_free(temp_ctx); + + retval = _Py_strhex((const char *)digest, length); + PyMem_Free(digest); + return retval; +} + +static PyMethodDef EVPXOF_methods[] = { + EVPXOF_DIGEST_METHODDEF + EVPXOF_HEXDIGEST_METHODDEF + {NULL, NULL} /* sentinel */ +}; + + +static PyObject * +EVPXOF_get_digest_size(EVPobject *self, void *closure) +{ + return PyLong_FromLong(0); +} + +static PyGetSetDef EVPXOF_getseters[] = { + {"digest_size", + (getter)EVPXOF_get_digest_size, NULL, + NULL, + NULL}, + {NULL} /* Sentinel */ +}; + +PyDoc_STRVAR(hashxoftype_doc, +"HASHXOF(name, string=b\'\')\n" +"--\n" +"\n" +"A hash is an object used to calculate a checksum of a string of information.\n" +"\n" +"Methods:\n" +"\n" +"update() -- updates the current digest with an additional string\n" +"digest(length) -- return the current digest value\n" +"hexdigest(length) -- return the current digest as a string of hexadecimal digits\n" +"copy() -- return a copy of the current hash object\n" +"\n" +"Attributes:\n" +"\n" +"name -- the hash algorithm being used by this object\n" +"digest_size -- number of bytes in this hashes output"); + +static PyType_Slot 
EVPXOFtype_slots[] = { + {Py_tp_doc, (char *)hashxoftype_doc}, + {Py_tp_methods, EVPXOF_methods}, + {Py_tp_getset, EVPXOF_getseters}, + {Py_tp_new, _disabled_new}, + {0, 0}, +}; + +static PyType_Spec EVPXOFtype_spec = { + "_hashlib.HASHXOF", /*tp_name*/ + sizeof(EVPobject), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, + EVPXOFtype_slots +}; + + +#endif + static PyObject * -EVPnew(const EVP_MD *digest, +EVPnew(PyObject *module, const EVP_MD *digest, const unsigned char *cp, Py_ssize_t len, int usedforsecurity) { int result = 0; EVPobject *self; + PyTypeObject *type = get_hashlib_state(module)->EVPtype; if (!digest) { PyErr_SetString(PyExc_ValueError, "unsupported hash type"); return NULL; } - if ((self = newEVPobject()) == NULL) +#ifdef PY_OPENSSL_HAS_SHAKE + if ((EVP_MD_flags(digest) & EVP_MD_FLAG_XOF) == EVP_MD_FLAG_XOF) { + type = get_hashlib_state(module)->EVPXOFtype; + } +#endif + + if ((self = newEVPobject(type)) == NULL) return NULL; if (!usedforsecurity) { @@ -614,7 +787,7 @@ EVP_new_impl(PyObject *module, PyObject *name_obj, PyObject *data_obj, Py_buffer view = { 0 }; PyObject *ret_obj; char *name; - const EVP_MD *digest; + const EVP_MD *digest = NULL; if (!PyArg_Parse(name_obj, "s", &name)) { PyErr_SetString(PyExc_TypeError, "name must be a string"); @@ -626,7 +799,7 @@ EVP_new_impl(PyObject *module, PyObject *name_obj, PyObject *data_obj, digest = py_digest_by_name(name); - ret_obj = EVPnew(digest, + ret_obj = EVPnew(module, digest, (unsigned char*)view.buf, view.len, usedforsecurity); @@ -645,7 +818,7 @@ EVP_fast_new(PyObject *module, PyObject *data_obj, const EVP_MD *digest, if (data_obj) GET_BUFFER_VIEW_OR_ERROUT(data_obj, &view); - ret_obj = EVPnew(digest, + ret_obj = EVPnew(module, digest, (unsigned char*)view.buf, view.len, usedforsecurity); @@ -775,6 +948,125 @@ _hashlib_openssl_sha512_impl(PyObject *module, PyObject *data_obj, } +#ifdef PY_OPENSSL_HAS_SHA3 + +/*[clinic input] +_hashlib.openssl_sha3_224 + + 
string as data_obj: object(py_default="b''") = NULL + * + usedforsecurity: bool = True + +Returns a sha3-224 hash object; optionally initialized with a string + +[clinic start generated code]*/ + +static PyObject * +_hashlib_openssl_sha3_224_impl(PyObject *module, PyObject *data_obj, + int usedforsecurity) +/*[clinic end generated code: output=144641c1d144b974 input=e3a01b2888916157]*/ +{ + return EVP_fast_new(module, data_obj, EVP_sha3_224(), usedforsecurity); +} + +/*[clinic input] +_hashlib.openssl_sha3_256 + + string as data_obj: object(py_default="b''") = NULL + * + usedforsecurity: bool = True + +Returns a sha3-256 hash object; optionally initialized with a string + +[clinic start generated code]*/ + +static PyObject * +_hashlib_openssl_sha3_256_impl(PyObject *module, PyObject *data_obj, + int usedforsecurity) +/*[clinic end generated code: output=c61f1ab772d06668 input=e2908126c1b6deed]*/ +{ + return EVP_fast_new(module, data_obj, EVP_sha3_256(), usedforsecurity); +} + +/*[clinic input] +_hashlib.openssl_sha3_384 + + string as data_obj: object(py_default="b''") = NULL + * + usedforsecurity: bool = True + +Returns a sha3-384 hash object; optionally initialized with a string + +[clinic start generated code]*/ + +static PyObject * +_hashlib_openssl_sha3_384_impl(PyObject *module, PyObject *data_obj, + int usedforsecurity) +/*[clinic end generated code: output=f68e4846858cf0ee input=ec0edf5c792f8252]*/ +{ + return EVP_fast_new(module, data_obj, EVP_sha3_384(), usedforsecurity); +} + +/*[clinic input] +_hashlib.openssl_sha3_512 + + string as data_obj: object(py_default="b''") = NULL + * + usedforsecurity: bool = True + +Returns a sha3-512 hash object; optionally initialized with a string + +[clinic start generated code]*/ + +static PyObject * +_hashlib_openssl_sha3_512_impl(PyObject *module, PyObject *data_obj, + int usedforsecurity) +/*[clinic end generated code: output=2eede478c159354a input=64e2cc0c094d56f4]*/ +{ + return EVP_fast_new(module, data_obj, 
EVP_sha3_512(), usedforsecurity); +} +#endif /* PY_OPENSSL_HAS_SHA3 */ + +#ifdef PY_OPENSSL_HAS_SHAKE +/*[clinic input] +_hashlib.openssl_shake128 + + string as data_obj: object(py_default="b''") = NULL + * + usedforsecurity: bool = True + +Returns a shake128 variable hash object; optionally initialized with a string + +[clinic start generated code]*/ + +static PyObject * +_hashlib_openssl_shake128_impl(PyObject *module, PyObject *data_obj, + int usedforsecurity) +/*[clinic end generated code: output=c68a0e30b4c09e1a input=b6d1e9566bacbb64]*/ +{ + return EVP_fast_new(module, data_obj, EVP_shake128(), usedforsecurity); +} + +/*[clinic input] +_hashlib.openssl_shake256 + + string as data_obj: object(py_default="b''") = NULL + * + usedforsecurity: bool = True + +Returns a shake256 variable hash object; optionally initialized with a string + +[clinic start generated code]*/ + +static PyObject * +_hashlib_openssl_shake256_impl(PyObject *module, PyObject *data_obj, + int usedforsecurity) +/*[clinic end generated code: output=d56387762dcad516 input=591b9b78c0498116]*/ +{ + return EVP_fast_new(module, data_obj, EVP_shake256(), usedforsecurity); +} +#endif /* PY_OPENSSL_HAS_SHAKE */ + /*[clinic input] _hashlib.pbkdf2_hmac as pbkdf2_hmac @@ -1163,6 +1455,12 @@ static struct PyMethodDef EVP_functions[] = { _HASHLIB_OPENSSL_SHA256_METHODDEF _HASHLIB_OPENSSL_SHA384_METHODDEF _HASHLIB_OPENSSL_SHA512_METHODDEF + _HASHLIB_OPENSSL_SHA3_224_METHODDEF + _HASHLIB_OPENSSL_SHA3_256_METHODDEF + _HASHLIB_OPENSSL_SHA3_384_METHODDEF + _HASHLIB_OPENSSL_SHA3_512_METHODDEF + _HASHLIB_OPENSSL_SHAKE128_METHODDEF + _HASHLIB_OPENSSL_SHAKE256_METHODDEF {NULL, NULL} /* Sentinel */ }; @@ -1174,6 +1472,9 @@ hashlib_traverse(PyObject *m, visitproc visit, void *arg) { _hashlibstate *state = get_hashlib_state(m); Py_VISIT(state->EVPtype); +#ifdef PY_OPENSSL_HAS_SHAKE + Py_VISIT(state->EVPXOFtype); +#endif return 0; } @@ -1182,6 +1483,9 @@ hashlib_clear(PyObject *m) { _hashlibstate *state = 
get_hashlib_state(m); Py_CLEAR(state->EVPtype); +#ifdef PY_OPENSSL_HAS_SHAKE + Py_CLEAR(state->EVPXOFtype); +#endif return 0; } @@ -1208,6 +1512,10 @@ PyMODINIT_FUNC PyInit__hashlib(void) { PyObject *m, *openssl_md_meth_names; + _hashlibstate *state = NULL; +#ifdef PY_OPENSSL_HAS_SHAKE + PyObject *bases; +#endif #if (OPENSSL_VERSION_NUMBER < 0x10100000L) || defined(LIBRESSL_VERSION_NUMBER) /* Load all digest algorithms and initialize cpuid */ @@ -1225,10 +1533,37 @@ PyInit__hashlib(void) if (m == NULL) return NULL; + state = get_hashlib_state(m); + PyTypeObject *EVPtype = (PyTypeObject *)PyType_FromSpec(&EVPtype_spec); - if (EVPtype == NULL) + if (EVPtype == NULL) { + Py_DECREF(m); return NULL; - get_hashlib_state(m)->EVPtype = EVPtype; + } + state->EVPtype = EVPtype; + + Py_INCREF((PyObject *)state->EVPtype); + PyModule_AddObject(m, "HASH", (PyObject *)state->EVPtype); + +#ifdef PY_OPENSSL_HAS_SHAKE + bases = PyTuple_Pack(1, (PyObject *)EVPtype); + if (bases == NULL) { + Py_DECREF(m); + return NULL; + } + PyTypeObject *EVPXOFtype = (PyTypeObject *)PyType_FromSpecWithBases( + &EVPXOFtype_spec, bases + ); + Py_DECREF(bases); + if (EVPXOFtype == NULL) { + Py_DECREF(m); + return NULL; + } + state->EVPXOFtype = EVPXOFtype; + + Py_INCREF((PyObject *)state->EVPXOFtype); + PyModule_AddObject(m, "HASHXOF", (PyObject *)state->EVPXOFtype); +#endif openssl_md_meth_names = generate_hash_name_list(); if (openssl_md_meth_names == NULL) { @@ -1240,9 +1575,6 @@ PyInit__hashlib(void) return NULL; } - Py_INCREF((PyObject *)get_hashlib_state(m)->EVPtype); - PyModule_AddObject(m, "HASH", (PyObject *)get_hashlib_state(m)->EVPtype); - PyState_AddModule(m, &_hashlibmodule); return m; } diff --git a/Modules/clinic/_hashopenssl.c.h b/Modules/clinic/_hashopenssl.c.h index 1b0c6d0ce43d2..71c9246c95a93 100644 --- a/Modules/clinic/_hashopenssl.c.h +++ b/Modules/clinic/_hashopenssl.c.h @@ -65,6 +65,110 @@ PyDoc_STRVAR(EVP_update__doc__, #define EVP_UPDATE_METHODDEF \ {"update", 
(PyCFunction)EVP_update, METH_O, EVP_update__doc__}, +#if defined(PY_OPENSSL_HAS_SHAKE) + +PyDoc_STRVAR(EVPXOF_digest__doc__, +"digest($self, /, length)\n" +"--\n" +"\n" +"Return the digest value as a bytes object."); + +#define EVPXOF_DIGEST_METHODDEF \ + {"digest", (PyCFunction)(void(*)(void))EVPXOF_digest, METH_FASTCALL|METH_KEYWORDS, EVPXOF_digest__doc__}, + +static PyObject * +EVPXOF_digest_impl(EVPobject *self, Py_ssize_t length); + +static PyObject * +EVPXOF_digest(EVPobject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + static const char * const _keywords[] = {"length", NULL}; + static _PyArg_Parser _parser = {NULL, _keywords, "digest", 0}; + PyObject *argsbuf[1]; + Py_ssize_t length; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); + if (!args) { + goto exit; + } + if (PyFloat_Check(args[0])) { + PyErr_SetString(PyExc_TypeError, + "integer argument expected, got float" ); + goto exit; + } + { + Py_ssize_t ival = -1; + PyObject *iobj = PyNumber_Index(args[0]); + if (iobj != NULL) { + ival = PyLong_AsSsize_t(iobj); + Py_DECREF(iobj); + } + if (ival == -1 && PyErr_Occurred()) { + goto exit; + } + length = ival; + } + return_value = EVPXOF_digest_impl(self, length); + +exit: + return return_value; +} + +#endif /* defined(PY_OPENSSL_HAS_SHAKE) */ + +#if defined(PY_OPENSSL_HAS_SHAKE) + +PyDoc_STRVAR(EVPXOF_hexdigest__doc__, +"hexdigest($self, /, length)\n" +"--\n" +"\n" +"Return the digest value as a string of hexadecimal digits."); + +#define EVPXOF_HEXDIGEST_METHODDEF \ + {"hexdigest", (PyCFunction)(void(*)(void))EVPXOF_hexdigest, METH_FASTCALL|METH_KEYWORDS, EVPXOF_hexdigest__doc__}, + +static PyObject * +EVPXOF_hexdigest_impl(EVPobject *self, Py_ssize_t length); + +static PyObject * +EVPXOF_hexdigest(EVPobject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + static const char * const _keywords[] = 
{"length", NULL}; + static _PyArg_Parser _parser = {NULL, _keywords, "hexdigest", 0}; + PyObject *argsbuf[1]; + Py_ssize_t length; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); + if (!args) { + goto exit; + } + if (PyFloat_Check(args[0])) { + PyErr_SetString(PyExc_TypeError, + "integer argument expected, got float" ); + goto exit; + } + { + Py_ssize_t ival = -1; + PyObject *iobj = PyNumber_Index(args[0]); + if (iobj != NULL) { + ival = PyLong_AsSsize_t(iobj); + Py_DECREF(iobj); + } + if (ival == -1 && PyErr_Occurred()) { + goto exit; + } + length = ival; + } + return_value = EVPXOF_hexdigest_impl(self, length); + +exit: + return return_value; +} + +#endif /* defined(PY_OPENSSL_HAS_SHAKE) */ + PyDoc_STRVAR(EVP_new__doc__, "new($module, /, name, string=b\'\', *, usedforsecurity=True)\n" "--\n" @@ -436,6 +540,342 @@ _hashlib_openssl_sha512(PyObject *module, PyObject *const *args, Py_ssize_t narg return return_value; } +#if defined(PY_OPENSSL_HAS_SHA3) + +PyDoc_STRVAR(_hashlib_openssl_sha3_224__doc__, +"openssl_sha3_224($module, /, string=b\'\', *, usedforsecurity=True)\n" +"--\n" +"\n" +"Returns a sha3-224 hash object; optionally initialized with a string"); + +#define _HASHLIB_OPENSSL_SHA3_224_METHODDEF \ + {"openssl_sha3_224", (PyCFunction)(void(*)(void))_hashlib_openssl_sha3_224, METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha3_224__doc__}, + +static PyObject * +_hashlib_openssl_sha3_224_impl(PyObject *module, PyObject *data_obj, + int usedforsecurity); + +static PyObject * +_hashlib_openssl_sha3_224(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + static const char * const _keywords[] = {"string", "usedforsecurity", NULL}; + static _PyArg_Parser _parser = {NULL, _keywords, "openssl_sha3_224", 0}; + PyObject *argsbuf[2]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 0; + PyObject *data_obj = NULL; + int usedforsecurity = 1; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf); + if (!args) { + goto exit; + } + if (!noptargs) { + goto skip_optional_pos; + } + if (args[0]) { + data_obj = args[0]; + if (!--noptargs) { + goto skip_optional_pos; + } + } +skip_optional_pos: + if (!noptargs) { + goto skip_optional_kwonly; + } + usedforsecurity = PyObject_IsTrue(args[1]); + if (usedforsecurity < 0) { + goto exit; + } +skip_optional_kwonly: + return_value = _hashlib_openssl_sha3_224_impl(module, data_obj, usedforsecurity); + +exit: + return return_value; +} + +#endif /* defined(PY_OPENSSL_HAS_SHA3) */ + +#if defined(PY_OPENSSL_HAS_SHA3) + +PyDoc_STRVAR(_hashlib_openssl_sha3_256__doc__, +"openssl_sha3_256($module, /, string=b\'\', *, usedforsecurity=True)\n" +"--\n" +"\n" +"Returns a sha3-256 hash object; optionally initialized with a string"); + +#define _HASHLIB_OPENSSL_SHA3_256_METHODDEF \ + {"openssl_sha3_256", (PyCFunction)(void(*)(void))_hashlib_openssl_sha3_256, METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha3_256__doc__}, + +static PyObject * +_hashlib_openssl_sha3_256_impl(PyObject *module, PyObject *data_obj, + int usedforsecurity); + +static PyObject * +_hashlib_openssl_sha3_256(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + static const char * const _keywords[] = {"string", "usedforsecurity", NULL}; + static _PyArg_Parser _parser = {NULL, _keywords, "openssl_sha3_256", 0}; + PyObject *argsbuf[2]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 0; + PyObject *data_obj = NULL; + int usedforsecurity = 1; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf); + if (!args) { + goto exit; + } + if (!noptargs) { + goto skip_optional_pos; + } + if (args[0]) { + data_obj = args[0]; + if (!--noptargs) { + goto skip_optional_pos; + } + } +skip_optional_pos: + if (!noptargs) { + goto skip_optional_kwonly; + } + usedforsecurity = PyObject_IsTrue(args[1]); + if (usedforsecurity < 0) { + goto exit; + } +skip_optional_kwonly: + return_value = _hashlib_openssl_sha3_256_impl(module, data_obj, usedforsecurity); + +exit: + return return_value; +} + +#endif /* defined(PY_OPENSSL_HAS_SHA3) */ + +#if defined(PY_OPENSSL_HAS_SHA3) + +PyDoc_STRVAR(_hashlib_openssl_sha3_384__doc__, +"openssl_sha3_384($module, /, string=b\'\', *, usedforsecurity=True)\n" +"--\n" +"\n" +"Returns a sha3-384 hash object; optionally initialized with a string"); + +#define _HASHLIB_OPENSSL_SHA3_384_METHODDEF \ + {"openssl_sha3_384", (PyCFunction)(void(*)(void))_hashlib_openssl_sha3_384, METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha3_384__doc__}, + +static PyObject * +_hashlib_openssl_sha3_384_impl(PyObject *module, PyObject *data_obj, + int usedforsecurity); + +static PyObject * +_hashlib_openssl_sha3_384(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + static const char * const _keywords[] = {"string", "usedforsecurity", NULL}; + static _PyArg_Parser _parser = {NULL, _keywords, "openssl_sha3_384", 0}; + PyObject *argsbuf[2]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 0; + PyObject *data_obj = NULL; + int usedforsecurity = 1; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf); + if (!args) { + goto exit; + } + if (!noptargs) { + goto skip_optional_pos; + } + if (args[0]) { + data_obj = args[0]; + if (!--noptargs) { + goto skip_optional_pos; + } + } +skip_optional_pos: + if (!noptargs) { + goto skip_optional_kwonly; + } + usedforsecurity = PyObject_IsTrue(args[1]); + if (usedforsecurity < 0) { + goto exit; + } +skip_optional_kwonly: + return_value = _hashlib_openssl_sha3_384_impl(module, data_obj, usedforsecurity); + +exit: + return return_value; +} + +#endif /* defined(PY_OPENSSL_HAS_SHA3) */ + +#if defined(PY_OPENSSL_HAS_SHA3) + +PyDoc_STRVAR(_hashlib_openssl_sha3_512__doc__, +"openssl_sha3_512($module, /, string=b\'\', *, usedforsecurity=True)\n" +"--\n" +"\n" +"Returns a sha3-512 hash object; optionally initialized with a string"); + +#define _HASHLIB_OPENSSL_SHA3_512_METHODDEF \ + {"openssl_sha3_512", (PyCFunction)(void(*)(void))_hashlib_openssl_sha3_512, METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha3_512__doc__}, + +static PyObject * +_hashlib_openssl_sha3_512_impl(PyObject *module, PyObject *data_obj, + int usedforsecurity); + +static PyObject * +_hashlib_openssl_sha3_512(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + static const char * const _keywords[] = {"string", "usedforsecurity", NULL}; + static _PyArg_Parser _parser = {NULL, _keywords, "openssl_sha3_512", 0}; + PyObject *argsbuf[2]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 0; + PyObject *data_obj = NULL; + int usedforsecurity = 1; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf); + if (!args) { + goto exit; + } + if (!noptargs) { + goto skip_optional_pos; + } + if (args[0]) { + data_obj = args[0]; + if (!--noptargs) { + goto skip_optional_pos; + } + } +skip_optional_pos: + if (!noptargs) { + goto skip_optional_kwonly; + } + usedforsecurity = PyObject_IsTrue(args[1]); + if (usedforsecurity < 0) { + goto exit; + } +skip_optional_kwonly: + return_value = _hashlib_openssl_sha3_512_impl(module, data_obj, usedforsecurity); + +exit: + return return_value; +} + +#endif /* defined(PY_OPENSSL_HAS_SHA3) */ + +#if defined(PY_OPENSSL_HAS_SHAKE) + +PyDoc_STRVAR(_hashlib_openssl_shake128__doc__, +"openssl_shake128($module, /, string=b\'\', *, usedforsecurity=True)\n" +"--\n" +"\n" +"Returns a shake128 variable hash object; optionally initialized with a string"); + +#define _HASHLIB_OPENSSL_SHAKE128_METHODDEF \ + {"openssl_shake128", (PyCFunction)(void(*)(void))_hashlib_openssl_shake128, METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_shake128__doc__}, + +static PyObject * +_hashlib_openssl_shake128_impl(PyObject *module, PyObject *data_obj, + int usedforsecurity); + +static PyObject * +_hashlib_openssl_shake128(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + static const char * const _keywords[] = {"string", "usedforsecurity", NULL}; + static _PyArg_Parser _parser = {NULL, _keywords, "openssl_shake128", 0}; + PyObject *argsbuf[2]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 0; + PyObject *data_obj = NULL; + int usedforsecurity = 1; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf); + if (!args) { + goto exit; + } + if (!noptargs) { + goto skip_optional_pos; + } + if (args[0]) { + data_obj = args[0]; + if (!--noptargs) { + goto skip_optional_pos; + } + } +skip_optional_pos: + if (!noptargs) { + goto skip_optional_kwonly; + } + usedforsecurity = PyObject_IsTrue(args[1]); + if (usedforsecurity < 0) { + goto exit; + } +skip_optional_kwonly: + return_value = _hashlib_openssl_shake128_impl(module, data_obj, usedforsecurity); + +exit: + return return_value; +} + +#endif /* defined(PY_OPENSSL_HAS_SHAKE) */ + +#if defined(PY_OPENSSL_HAS_SHAKE) + +PyDoc_STRVAR(_hashlib_openssl_shake256__doc__, +"openssl_shake256($module, /, string=b\'\', *, usedforsecurity=True)\n" +"--\n" +"\n" +"Returns a shake256 variable hash object; optionally initialized with a string"); + +#define _HASHLIB_OPENSSL_SHAKE256_METHODDEF \ + {"openssl_shake256", (PyCFunction)(void(*)(void))_hashlib_openssl_shake256, METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_shake256__doc__}, + +static PyObject * +_hashlib_openssl_shake256_impl(PyObject *module, PyObject *data_obj, + int usedforsecurity); + +static PyObject * +_hashlib_openssl_shake256(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + static const char * const _keywords[] = {"string", "usedforsecurity", NULL}; + static _PyArg_Parser _parser = {NULL, _keywords, "openssl_shake256", 0}; + PyObject *argsbuf[2]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 0; + PyObject *data_obj = NULL; + int usedforsecurity = 1; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf); + if (!args) { + goto exit; + } + if (!noptargs) { + goto skip_optional_pos; + } + if (args[0]) { + data_obj = args[0]; + if (!--noptargs) { + goto skip_optional_pos; + } + } +skip_optional_pos: + if (!noptargs) { + goto skip_optional_kwonly; + } + usedforsecurity = PyObject_IsTrue(args[1]); + if (usedforsecurity < 0) { + goto exit; + } +skip_optional_kwonly: + return_value = _hashlib_openssl_shake256_impl(module, data_obj, usedforsecurity); + +exit: + return return_value; +} + +#endif /* defined(PY_OPENSSL_HAS_SHAKE) */ + PyDoc_STRVAR(pbkdf2_hmac__doc__, "pbkdf2_hmac($module, /, hash_name, password, salt, iterations,\n" " dklen=None)\n" @@ -764,6 +1204,38 @@ _hashlib_get_fips_mode(PyObject *module, PyObject *Py_UNUSED(ignored)) #endif /* !defined(LIBRESSL_VERSION_NUMBER) */ +#ifndef EVPXOF_DIGEST_METHODDEF + #define EVPXOF_DIGEST_METHODDEF +#endif /* !defined(EVPXOF_DIGEST_METHODDEF) */ + +#ifndef EVPXOF_HEXDIGEST_METHODDEF + #define EVPXOF_HEXDIGEST_METHODDEF +#endif /* !defined(EVPXOF_HEXDIGEST_METHODDEF) */ + +#ifndef _HASHLIB_OPENSSL_SHA3_224_METHODDEF + #define _HASHLIB_OPENSSL_SHA3_224_METHODDEF +#endif /* !defined(_HASHLIB_OPENSSL_SHA3_224_METHODDEF) */ + +#ifndef _HASHLIB_OPENSSL_SHA3_256_METHODDEF + #define _HASHLIB_OPENSSL_SHA3_256_METHODDEF +#endif /* !defined(_HASHLIB_OPENSSL_SHA3_256_METHODDEF) */ + +#ifndef _HASHLIB_OPENSSL_SHA3_384_METHODDEF + #define _HASHLIB_OPENSSL_SHA3_384_METHODDEF +#endif /* !defined(_HASHLIB_OPENSSL_SHA3_384_METHODDEF) */ + +#ifndef _HASHLIB_OPENSSL_SHA3_512_METHODDEF + #define _HASHLIB_OPENSSL_SHA3_512_METHODDEF +#endif /* !defined(_HASHLIB_OPENSSL_SHA3_512_METHODDEF) */ + +#ifndef _HASHLIB_OPENSSL_SHAKE128_METHODDEF + #define _HASHLIB_OPENSSL_SHAKE128_METHODDEF +#endif /* !defined(_HASHLIB_OPENSSL_SHAKE128_METHODDEF) */ + +#ifndef 
_HASHLIB_OPENSSL_SHAKE256_METHODDEF + #define _HASHLIB_OPENSSL_SHAKE256_METHODDEF +#endif /* !defined(_HASHLIB_OPENSSL_SHAKE256_METHODDEF) */ + #ifndef _HASHLIB_SCRYPT_METHODDEF #define _HASHLIB_SCRYPT_METHODDEF #endif /* !defined(_HASHLIB_SCRYPT_METHODDEF) */ @@ -771,4 +1243,4 @@ _hashlib_get_fips_mode(PyObject *module, PyObject *Py_UNUSED(ignored)) #ifndef _HASHLIB_GET_FIPS_MODE_METHODDEF #define _HASHLIB_GET_FIPS_MODE_METHODDEF #endif /* !defined(_HASHLIB_GET_FIPS_MODE_METHODDEF) */ -/*[clinic end generated code: output=4babbd88389a196b input=a9049054013a1b77]*/ +/*[clinic end generated code: output=a39bf0a766d7cdf7 input=a9049054013a1b77]*/ From webhook-mailer at python.org Sat May 16 17:46:15 2020 From: webhook-mailer at python.org (Batuhan Taskaya) Date: Sat, 16 May 2020 21:46:15 -0000 Subject: [Python-checkins] bpo-38870: Do not separate factor prefixes in ast.unparse (GH-20133) Message-ID: https://github.com/python/cpython/commit/ce4a753dcb3eef3d68e892a6515490b1aa219651 commit: ce4a753dcb3eef3d68e892a6515490b1aa219651 branch: master author: Batuhan Taskaya committer: GitHub date: 2020-05-16T22:46:11+01:00 summary: bpo-38870: Do not separate factor prefixes in ast.unparse (GH-20133) files: M Lib/ast.py M Lib/test/test_unparse.py diff --git a/Lib/ast.py b/Lib/ast.py index 7a43581c0e6ce..1de37b9567ece 100644 --- a/Lib/ast.py +++ b/Lib/ast.py @@ -1190,10 +1190,10 @@ def visit_Tuple(self, node): unop = {"Invert": "~", "Not": "not", "UAdd": "+", "USub": "-"} unop_precedence = { - "~": _Precedence.FACTOR, "not": _Precedence.NOT, + "~": _Precedence.FACTOR, "+": _Precedence.FACTOR, - "-": _Precedence.FACTOR + "-": _Precedence.FACTOR, } def visit_UnaryOp(self, node): @@ -1201,7 +1201,10 @@ def visit_UnaryOp(self, node): operator_precedence = self.unop_precedence[operator] with self.require_parens(operator_precedence, node): self.write(operator) - self.write(" ") + # factor prefixes (+, -, ~) shouldn't be seperated + # from the value they belong, (e.g: +1 instead of + 
1) + if operator_precedence is not _Precedence.FACTOR: + self.write(" ") self.set_precedence(operator_precedence, node.operand) self.traverse(node.operand) diff --git a/Lib/test/test_unparse.py b/Lib/test/test_unparse.py index 2be44b246aa69..1393bcce741c9 100644 --- a/Lib/test/test_unparse.py +++ b/Lib/test/test_unparse.py @@ -347,7 +347,7 @@ def test_simple_expressions_parens(self): self.check_src_roundtrip("(1 + 2) / 3") self.check_src_roundtrip("(1 + 2) * 3 + 4 * (5 + 2)") self.check_src_roundtrip("(1 + 2) * 3 + 4 * (5 + 2) ** 2") - self.check_src_roundtrip("~ x") + self.check_src_roundtrip("~x") self.check_src_roundtrip("x and y") self.check_src_roundtrip("x and y and z") self.check_src_roundtrip("x and (y and x)") @@ -401,6 +401,12 @@ def test_docstrings_negative_cases(self): self.check_ast_roundtrip(src) self.check_src_dont_roundtrip(src) + def test_unary_op_factor(self): + for prefix in ("+", "-", "~"): + self.check_src_roundtrip(f"{prefix}1") + for prefix in ("not",): + self.check_src_roundtrip(f"{prefix} 1") + class DirectoryTestCase(ASTTestCase): """Test roundtrip behaviour on all files in Lib and Lib/test.""" From webhook-mailer at python.org Sat May 16 17:53:30 2020 From: webhook-mailer at python.org (Batuhan Taskaya) Date: Sat, 16 May 2020 21:53:30 -0000 Subject: [Python-checkins] bpo-38870: Don't put unnecessary parentheses on class declarations in ast.parse (GH-20134) Message-ID: https://github.com/python/cpython/commit/25160cdc4775a1ddb4e37c8bf5a6e31ad9c146ed commit: 25160cdc4775a1ddb4e37c8bf5a6e31ad9c146ed branch: master author: Batuhan Taskaya committer: GitHub date: 2020-05-16T22:53:25+01:00 summary: bpo-38870: Don't put unnecessary parentheses on class declarations in ast.parse (GH-20134) files: M Lib/ast.py M Lib/test/test_unparse.py diff --git a/Lib/ast.py b/Lib/ast.py index 1de37b9567ece..d6cb334432c9c 100644 --- a/Lib/ast.py +++ b/Lib/ast.py @@ -930,7 +930,7 @@ def visit_ClassDef(self, node): self.fill("@") self.traverse(deco) 
self.fill("class " + node.name) - with self.delimit("(", ")"): + with self.delimit_if("(", ")", condition = node.bases or node.keywords): comma = False for e in node.bases: if comma: diff --git a/Lib/test/test_unparse.py b/Lib/test/test_unparse.py index 1393bcce741c9..410df7dbb7581 100644 --- a/Lib/test/test_unparse.py +++ b/Lib/test/test_unparse.py @@ -110,7 +110,7 @@ class Foo: pass docstring_prefixes = [ "", - "class foo():\n ", + "class foo:\n ", "def foo():\n ", "async def foo():\n ", ] @@ -367,6 +367,19 @@ def test_simple_expressions_parens(self): self.check_src_roundtrip("call((yield x))") self.check_src_roundtrip("return x + (yield x)") + + def test_class_bases_and_keywords(self): + self.check_src_roundtrip("class X:\n pass") + self.check_src_roundtrip("class X(A):\n pass") + self.check_src_roundtrip("class X(A, B, C, D):\n pass") + self.check_src_roundtrip("class X(x=y):\n pass") + self.check_src_roundtrip("class X(metaclass=z):\n pass") + self.check_src_roundtrip("class X(x=y, z=d):\n pass") + self.check_src_roundtrip("class X(A, x=y):\n pass") + self.check_src_roundtrip("class X(A, **kw):\n pass") + self.check_src_roundtrip("class X(*args):\n pass") + self.check_src_roundtrip("class X(*args, **kwargs):\n pass") + def test_docstrings(self): docstrings = ( '"""simple doc string"""', From webhook-mailer at python.org Sat May 16 18:36:22 2020 From: webhook-mailer at python.org (Batuhan Taskaya) Date: Sat, 16 May 2020 22:36:22 -0000 Subject: [Python-checkins] bpo-39740: Early declare devpoll_methods to support old compilers (GH-19281) Message-ID: https://github.com/python/cpython/commit/31245d19f2de03e57fd93c5169f00a36d7410fcf commit: 31245d19f2de03e57fd93c5169f00a36d7410fcf branch: master author: Batuhan Taskaya committer: GitHub date: 2020-05-16T23:36:14+01:00 summary: bpo-39740: Early declare devpoll_methods to support old compilers (GH-19281) files: M Modules/selectmodule.c diff --git a/Modules/selectmodule.c b/Modules/selectmodule.c index 
5c15e9973ab84..bec236689bd89 100644 --- a/Modules/selectmodule.c +++ b/Modules/selectmodule.c @@ -764,6 +764,8 @@ poll_dealloc(pollObject *self) #ifdef HAVE_SYS_DEVPOLL_H +static PyMethodDef devpoll_methods[]; + typedef struct { PyObject_HEAD int fd_devpoll; From webhook-mailer at python.org Sat May 16 18:36:45 2020 From: webhook-mailer at python.org (Batuhan Taskaya) Date: Sat, 16 May 2020 22:36:45 -0000 Subject: [Python-checkins] bpo-40211: Clarify os.preadv and os.pwritev are supported on AIX 7.1+ (GH-19401) Message-ID: https://github.com/python/cpython/commit/cae2275949157490b469d79ef250387eca324b9e commit: cae2275949157490b469d79ef250387eca324b9e branch: master author: Batuhan Taskaya committer: GitHub date: 2020-05-16T23:36:40+01:00 summary: bpo-40211: Clarify os.preadv and os.pwritev are supported on AIX 7.1+ (GH-19401) files: M Doc/library/os.rst diff --git a/Doc/library/os.rst b/Doc/library/os.rst index 943e67625c8ee..6d5fb314a8e39 100644 --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -1151,7 +1151,8 @@ or `the MSDN `_ on Windo Combine the functionality of :func:`os.readv` and :func:`os.pread`. .. availability:: Linux 2.6.30 and newer, FreeBSD 6.0 and newer, - OpenBSD 2.7 and newer. Using flags requires Linux 4.6 or newer. + OpenBSD 2.7 and newer, AIX 7.1 and newer. Using flags requires + Linux 4.6 or newer. .. versionadded:: 3.7 @@ -1219,7 +1220,8 @@ or `the MSDN `_ on Windo Combine the functionality of :func:`os.writev` and :func:`os.pwrite`. .. availability:: Linux 2.6.30 and newer, FreeBSD 6.0 and newer, - OpenBSD 2.7 and newer. Using flags requires Linux 4.7 or newer. + OpenBSD 2.7 and newer, AIX 7.1 and newer. Using flags requires + Linux 4.7 or newer. .. 
versionadded:: 3.7 From webhook-mailer at python.org Sat May 16 18:38:07 2020 From: webhook-mailer at python.org (Batuhan Taskaya) Date: Sat, 16 May 2020 22:38:07 -0000 Subject: [Python-checkins] bpo-40165: Suppress stderr when checking if test_stty_match should be skipped (GH-19325) Message-ID: https://github.com/python/cpython/commit/d5a980a60790571ec88aba4e011c91e099e31e98 commit: d5a980a60790571ec88aba4e011c91e099e31e98 branch: master author: Batuhan Taskaya committer: GitHub date: 2020-05-16T23:38:02+01:00 summary: bpo-40165: Suppress stderr when checking if test_stty_match should be skipped (GH-19325) files: M Lib/test/test_os.py diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py index 362ba9e1042cb..0db7d30f6385e 100644 --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -3485,7 +3485,11 @@ def test_stty_match(self): should work too. """ try: - size = subprocess.check_output(['stty', 'size']).decode().split() + size = ( + subprocess.check_output( + ["stty", "size"], stderr=subprocess.DEVNULL, text=True + ).split() + ) except (FileNotFoundError, subprocess.CalledProcessError, PermissionError): self.skipTest("stty invocation failed") From webhook-mailer at python.org Sat May 16 18:49:12 2020 From: webhook-mailer at python.org (Batuhan Taskaya) Date: Sat, 16 May 2020 22:49:12 -0000 Subject: [Python-checkins] bpo-38870: Correctly handle empty docstrings in ast.unparse (GH-18768) Message-ID: https://github.com/python/cpython/commit/e966af7cff78e14e1d289db587433504b4b53533 commit: e966af7cff78e14e1d289db587433504b4b53533 branch: master author: Batuhan Taskaya committer: GitHub date: 2020-05-16T23:49:07+01:00 summary: bpo-38870: Correctly handle empty docstrings in ast.unparse (GH-18768) Co-authored-by: Pablo Galindo files: M Lib/ast.py M Lib/test/test_unparse.py diff --git a/Lib/ast.py b/Lib/ast.py index d6cb334432c9c..5d0171f107299 100644 --- a/Lib/ast.py +++ b/Lib/ast.py @@ -1075,11 +1075,14 @@ def _write_docstring(self, node): if node.kind == "u": 
self.write("u") - # Preserve quotes in the docstring by escaping them - value = node.value.replace("\\", "\\\\") - value = value.replace('"""', '""\"') - if value[-1] == '"': - value = value.replace('"', '\\"', -1) + value = node.value + if value: + # Preserve quotes in the docstring by escaping them + value = value.replace("\\", "\\\\") + value = value.replace('"""', '""\"') + value = value.replace("\r", "\\r") + if value[-1] == '"': + value = value.replace('"', '\\"', -1) self.write(f'"""{value}"""') diff --git a/Lib/test/test_unparse.py b/Lib/test/test_unparse.py index 410df7dbb7581..4f5742852e23d 100644 --- a/Lib/test/test_unparse.py +++ b/Lib/test/test_unparse.py @@ -313,11 +313,18 @@ def test_invalid_yield_from(self): def test_docstrings(self): docstrings = ( 'this ends with double quote"', - 'this includes a """triple quote"""' + 'this includes a """triple quote"""', + '\r', + '\\r', + '\t', + '\\t', + '\n', + '\\n', + '\r\\r\t\\t\n\\n' ) for docstring in docstrings: # check as Module docstrings for easy testing - self.check_ast_roundtrip(f"'{docstring}'") + self.check_ast_roundtrip(f"'''{docstring}'''") def test_constant_tuples(self): self.check_src_roundtrip(ast.Constant(value=(1,), kind=None), "(1,)") @@ -390,6 +397,10 @@ def test_docstrings(self): empty newline"""''', '"""With some \t"""', '"""Foo "bar" baz """', + '"""\\r"""', + '""""""', + '"""\'\'\'"""', + '"""\'\'\'\'\'\'"""', ) for prefix in docstring_prefixes: From webhook-mailer at python.org Sat May 16 19:04:20 2020 From: webhook-mailer at python.org (Batuhan Taskaya) Date: Sat, 16 May 2020 23:04:20 -0000 Subject: [Python-checkins] bpo-38870: Implement round tripping support for typed AST in ast.unparse (GH-17797) Message-ID: https://github.com/python/cpython/commit/dff92bb31f7db1a80ac431811f8108bd0ef9be43 commit: dff92bb31f7db1a80ac431811f8108bd0ef9be43 branch: master author: Batuhan Taskaya committer: GitHub date: 2020-05-17T00:04:12+01:00 summary: bpo-38870: Implement round tripping support 
for typed AST in ast.unparse (GH-17797) files: M Lib/ast.py M Lib/test/test_unparse.py diff --git a/Lib/ast.py b/Lib/ast.py index 5d0171f107299..61fbe030a7825 100644 --- a/Lib/ast.py +++ b/Lib/ast.py @@ -648,6 +648,7 @@ def __init__(self): self._source = [] self._buffer = [] self._precedences = {} + self._type_ignores = {} self._indent = 0 def interleave(self, inter, f, seq): @@ -697,11 +698,15 @@ def buffer(self): return value @contextmanager - def block(self): + def block(self, *, extra = None): """A context manager for preparing the source for blocks. It adds the character':', increases the indentation on enter and decreases - the indentation on exit.""" + the indentation on exit. If *extra* is given, it will be directly + appended after the colon character. + """ self.write(":") + if extra: + self.write(extra) self._indent += 1 yield self._indent -= 1 @@ -748,6 +753,11 @@ def get_raw_docstring(self, node): if isinstance(node, Constant) and isinstance(node.value, str): return node + def get_type_comment(self, node): + comment = self._type_ignores.get(node.lineno) or node.type_comment + if comment is not None: + return f" # type: {comment}" + def traverse(self, node): if isinstance(node, list): for item in node: @@ -770,7 +780,12 @@ def _write_docstring_and_traverse_body(self, node): self.traverse(node.body) def visit_Module(self, node): + self._type_ignores = { + ignore.lineno: f"ignore{ignore.tag}" + for ignore in node.type_ignores + } self._write_docstring_and_traverse_body(node) + self._type_ignores.clear() def visit_FunctionType(self, node): with self.delimit("(", ")"): @@ -811,6 +826,8 @@ def visit_Assign(self, node): self.traverse(target) self.write(" = ") self.traverse(node.value) + if type_comment := self.get_type_comment(node): + self.write(type_comment) def visit_AugAssign(self, node): self.fill() @@ -966,7 +983,7 @@ def _function_helper(self, node, fill_suffix): if node.returns: self.write(" -> ") self.traverse(node.returns) - with self.block(): + 
with self.block(extra=self.get_type_comment(node)): self._write_docstring_and_traverse_body(node) def visit_For(self, node): @@ -980,7 +997,7 @@ def _for_helper(self, fill, node): self.traverse(node.target) self.write(" in ") self.traverse(node.iter) - with self.block(): + with self.block(extra=self.get_type_comment(node)): self.traverse(node.body) if node.orelse: self.fill("else") @@ -1018,13 +1035,13 @@ def visit_While(self, node): def visit_With(self, node): self.fill("with ") self.interleave(lambda: self.write(", "), self.traverse, node.items) - with self.block(): + with self.block(extra=self.get_type_comment(node)): self.traverse(node.body) def visit_AsyncWith(self, node): self.fill("async with ") self.interleave(lambda: self.write(", "), self.traverse, node.items) - with self.block(): + with self.block(extra=self.get_type_comment(node)): self.traverse(node.body) def visit_JoinedStr(self, node): diff --git a/Lib/test/test_unparse.py b/Lib/test/test_unparse.py index 4f5742852e23d..d543ca2526ece 100644 --- a/Lib/test/test_unparse.py +++ b/Lib/test/test_unparse.py @@ -108,12 +108,12 @@ class Foo: pass suite1 """ -docstring_prefixes = [ +docstring_prefixes = ( "", "class foo:\n ", "def foo():\n ", "async def foo():\n ", -] +) class ASTTestCase(unittest.TestCase): def assertASTEqual(self, ast1, ast2): @@ -340,6 +340,37 @@ def test_function_type(self): ): self.check_ast_roundtrip(function_type, mode="func_type") + def test_type_comments(self): + for statement in ( + "a = 5 # type:", + "a = 5 # type: int", + "a = 5 # type: int and more", + "def x(): # type: () -> None\n\tpass", + "def x(y): # type: (int) -> None and more\n\tpass", + "async def x(): # type: () -> None\n\tpass", + "async def x(y): # type: (int) -> None and more\n\tpass", + "for x in y: # type: int\n\tpass", + "async for x in y: # type: int\n\tpass", + "with x(): # type: int\n\tpass", + "async with x(): # type: int\n\tpass" + ): + self.check_ast_roundtrip(statement, type_comments=True) + + def 
test_type_ignore(self): + for statement in ( + "a = 5 # type: ignore", + "a = 5 # type: ignore and more", + "def x(): # type: ignore\n\tpass", + "def x(y): # type: ignore and more\n\tpass", + "async def x(): # type: ignore\n\tpass", + "async def x(y): # type: ignore and more\n\tpass", + "for x in y: # type: ignore\n\tpass", + "async for x in y: # type: ignore\n\tpass", + "with x(): # type: ignore\n\tpass", + "async with x(): # type: ignore\n\tpass" + ): + self.check_ast_roundtrip(statement, type_comments=True) + class CosmeticTestCase(ASTTestCase): """Test if there are cosmetic issues caused by unnecesary additions""" From webhook-mailer at python.org Sat May 16 19:05:44 2020 From: webhook-mailer at python.org (Christian Heimes) Date: Sat, 16 May 2020 23:05:44 -0000 Subject: [Python-checkins] bpo-40645: Deprecated internal details of hmac.HMAC (GH-20132) Message-ID: https://github.com/python/cpython/commit/837f9e42e3a1ad03b340661afe85e67d2719334f commit: 837f9e42e3a1ad03b340661afe85e67d2719334f branch: master author: Christian Heimes committer: GitHub date: 2020-05-17T01:05:40+02:00 summary: bpo-40645: Deprecated internal details of hmac.HMAC (GH-20132) files: A Misc/NEWS.d/next/Library/2020-05-16-19-34-38.bpo-40645.7ibMt-.rst M Doc/library/hmac.rst M Lib/hmac.py M Lib/test/test_hmac.py diff --git a/Doc/library/hmac.rst b/Doc/library/hmac.rst index 57ac8bb16120f..5ad348490eaf6 100644 --- a/Doc/library/hmac.rst +++ b/Doc/library/hmac.rst @@ -114,6 +114,12 @@ A hash object has the following attributes: .. versionadded:: 3.4 +.. deprecated:: 3.9 + + The undocumented attributes ``HMAC.digest_cons``, ``HMAC.inner``, and + ``HMAC.outer`` are internal implementation details and will be removed in + Python 3.10. + This module also provides the following helper function: .. 
function:: compare_digest(a, b) diff --git a/Lib/hmac.py b/Lib/hmac.py index b769876e6f774..54a1ef9bdbdcf 100644 --- a/Lib/hmac.py +++ b/Lib/hmac.py @@ -30,6 +30,10 @@ class HMAC: """ blocksize = 64 # 512-bit HMAC; can be changed in subclasses. + __slots__ = ( + "_digest_cons", "_inner", "_outer", "block_size", "digest_size" + ) + def __init__(self, key, msg=None, digestmod=''): """Create a new HMAC object. @@ -51,18 +55,18 @@ def __init__(self, key, msg=None, digestmod=''): raise TypeError("Missing required parameter 'digestmod'.") if callable(digestmod): - self.digest_cons = digestmod + self._digest_cons = digestmod elif isinstance(digestmod, str): - self.digest_cons = lambda d=b'': _hashlib.new(digestmod, d) + self._digest_cons = lambda d=b'': _hashlib.new(digestmod, d) else: - self.digest_cons = lambda d=b'': digestmod.new(d) + self._digest_cons = lambda d=b'': digestmod.new(d) - self.outer = self.digest_cons() - self.inner = self.digest_cons() - self.digest_size = self.inner.digest_size + self._outer = self._digest_cons() + self._inner = self._digest_cons() + self.digest_size = self._inner.digest_size - if hasattr(self.inner, 'block_size'): - blocksize = self.inner.block_size + if hasattr(self._inner, 'block_size'): + blocksize = self._inner.block_size if blocksize < 16: _warnings.warn('block_size of %d seems too small; using our ' 'default of %d.' 
% (blocksize, self.blocksize), @@ -79,21 +83,33 @@ def __init__(self, key, msg=None, digestmod=''): self.block_size = blocksize if len(key) > blocksize: - key = self.digest_cons(key).digest() + key = self._digest_cons(key).digest() key = key.ljust(blocksize, b'\0') - self.outer.update(key.translate(trans_5C)) - self.inner.update(key.translate(trans_36)) + self._outer.update(key.translate(trans_5C)) + self._inner.update(key.translate(trans_36)) if msg is not None: self.update(msg) @property def name(self): - return "hmac-" + self.inner.name + return "hmac-" + self._inner.name + + @property + def digest_cons(self): + return self._digest_cons + + @property + def inner(self): + return self._inner + + @property + def outer(self): + return self._outer def update(self, msg): """Feed data from msg into this hashing object.""" - self.inner.update(msg) + self._inner.update(msg) def copy(self): """Return a separate copy of this hashing object. @@ -102,10 +118,10 @@ def copy(self): """ # Call __new__ directly to avoid the expensive __init__. other = self.__class__.__new__(self.__class__) - other.digest_cons = self.digest_cons + other._digest_cons = self._digest_cons other.digest_size = self.digest_size - other.inner = self.inner.copy() - other.outer = self.outer.copy() + other._inner = self._inner.copy() + other._outer = self._outer.copy() return other def _current(self): @@ -113,8 +129,8 @@ def _current(self): To be used only internally with digest() and hexdigest(). """ - h = self.outer.copy() - h.update(self.inner.digest()) + h = self._outer.copy() + h.update(self._inner.digest()) return h def digest(self): diff --git a/Lib/test/test_hmac.py b/Lib/test/test_hmac.py index 08086f0e78c83..1f3ec4cb9172d 100644 --- a/Lib/test/test_hmac.py +++ b/Lib/test/test_hmac.py @@ -409,11 +409,11 @@ def test_attributes(self): # Testing if attributes are of same type. 
h1 = hmac.HMAC(b"key", digestmod="sha256") h2 = h1.copy() - self.assertTrue(h1.digest_cons == h2.digest_cons, + self.assertTrue(h1._digest_cons == h2._digest_cons, "digest constructors don't match.") - self.assertEqual(type(h1.inner), type(h2.inner), + self.assertEqual(type(h1._inner), type(h2._inner), "Types of inner don't match.") - self.assertEqual(type(h1.outer), type(h2.outer), + self.assertEqual(type(h1._outer), type(h2._outer), "Types of outer don't match.") @hashlib_helper.requires_hashdigest('sha256') @@ -423,10 +423,21 @@ def test_realcopy(self): h2 = h1.copy() # Using id() in case somebody has overridden __eq__/__ne__. self.assertTrue(id(h1) != id(h2), "No real copy of the HMAC instance.") - self.assertTrue(id(h1.inner) != id(h2.inner), + self.assertTrue(id(h1._inner) != id(h2._inner), "No real copy of the attribute 'inner'.") - self.assertTrue(id(h1.outer) != id(h2.outer), + self.assertTrue(id(h1._outer) != id(h2._outer), "No real copy of the attribute 'outer'.") + self.assertEqual(h1._inner, h1.inner) + self.assertEqual(h1._outer, h1.outer) + self.assertEqual(h1._digest_cons, h1.digest_cons) + + @hashlib_helper.requires_hashdigest('sha256') + def test_properties(self): + # deprecated properties + h1 = hmac.HMAC(b"key", digestmod="sha256") + self.assertEqual(h1._inner, h1.inner) + self.assertEqual(h1._outer, h1.outer) + self.assertEqual(h1._digest_cons, h1.digest_cons) @hashlib_helper.requires_hashdigest('sha256') def test_equality(self): diff --git a/Misc/NEWS.d/next/Library/2020-05-16-19-34-38.bpo-40645.7ibMt-.rst b/Misc/NEWS.d/next/Library/2020-05-16-19-34-38.bpo-40645.7ibMt-.rst new file mode 100644 index 0000000000000..19d5a651eb49a --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-16-19-34-38.bpo-40645.7ibMt-.rst @@ -0,0 +1,3 @@ +The :class:`hmac.HMAC` exposes internal implementation details. The +attributes ``digest_cons``, ``inner``, and ``outer`` are deprecated and will +be removed in the future. 
From webhook-mailer at python.org Sat May 16 20:22:05 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Sun, 17 May 2020 00:22:05 -0000 Subject: [Python-checkins] Use subTest in test_exceptions for better error reporting (GH-20140) Message-ID: https://github.com/python/cpython/commit/af8e5f84d909487a66558d086cb1754f49221236 commit: af8e5f84d909487a66558d086cb1754f49221236 branch: master author: Pablo Galindo committer: GitHub date: 2020-05-17T01:22:00+01:00 summary: Use subTest in test_exceptions for better error reporting (GH-20140) files: M Lib/test/test_exceptions.py diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py index efd77fdbaabe1..196f31e76ccbe 100644 --- a/Lib/test/test_exceptions.py +++ b/Lib/test/test_exceptions.py @@ -33,16 +33,17 @@ def __str__(self): class ExceptionTests(unittest.TestCase): def raise_catch(self, exc, excname): - try: - raise exc("spam") - except exc as err: - buf1 = str(err) - try: - raise exc("spam") - except exc as err: - buf2 = str(err) - self.assertEqual(buf1, buf2) - self.assertEqual(exc.__name__, excname) + with self.subTest(exc=exc, excname=excname): + try: + raise exc("spam") + except exc as err: + buf1 = str(err) + try: + raise exc("spam") + except exc as err: + buf2 = str(err) + self.assertEqual(buf1, buf2) + self.assertEqual(exc.__name__, excname) def testRaising(self): self.raise_catch(AttributeError, "AttributeError") @@ -133,13 +134,14 @@ def testSyntaxErrorMessage(self): # these code fragments def ckmsg(src, msg): - try: - compile(src, '', 'exec') - except SyntaxError as e: - if e.msg != msg: - self.fail("expected %s, got %s" % (msg, e.msg)) - else: - self.fail("failed to get expected SyntaxError") + with self.subTest(src=src, msg=msg): + try: + compile(src, '', 'exec') + except SyntaxError as e: + if e.msg != msg: + self.fail("expected %s, got %s" % (msg, e.msg)) + else: + self.fail("failed to get expected SyntaxError") s = '''if 1: try: @@ -179,15 +181,16 @@ def ckmsg(src, msg, 
exception=SyntaxError): ckmsg(s, "inconsistent use of tabs and spaces in indentation", TabError) def check(self, src, lineno, offset, encoding='utf-8'): - with self.assertRaises(SyntaxError) as cm: - compile(src, '', 'exec') - self.assertEqual(cm.exception.lineno, lineno) - self.assertEqual(cm.exception.offset, offset) - if cm.exception.text is not None: - if not isinstance(src, str): - src = src.decode(encoding, 'replace') - line = src.split('\n')[lineno-1] - self.assertIn(line, cm.exception.text) + with self.subTest(source=src, lineno=lineno, offset=offset): + with self.assertRaises(SyntaxError) as cm: + compile(src, '', 'exec') + self.assertEqual(cm.exception.lineno, lineno) + self.assertEqual(cm.exception.offset, offset) + if cm.exception.text is not None: + if not isinstance(src, str): + src = src.decode(encoding, 'replace') + line = src.split('\n')[lineno-1] + self.assertIn(line, cm.exception.text) def testSyntaxErrorOffset(self): check = self.check From webhook-mailer at python.org Sat May 16 22:54:01 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Sun, 17 May 2020 02:54:01 -0000 Subject: [Python-checkins] bpo-38870: Use subTest in test_unparse for better error reporting (GH-20141) Message-ID: https://github.com/python/cpython/commit/6341fc7257d89d798675ad6e425f7eb0b6f2b4bb commit: 6341fc7257d89d798675ad6e425f7eb0b6f2b4bb branch: master author: Pablo Galindo committer: GitHub date: 2020-05-17T03:53:57+01:00 summary: bpo-38870: Use subTest in test_unparse for better error reporting (GH-20141) files: M Lib/test/test_unparse.py diff --git a/Lib/test/test_unparse.py b/Lib/test/test_unparse.py index d543ca2526ece..67dcb1dae79ff 100644 --- a/Lib/test/test_unparse.py +++ b/Lib/test/test_unparse.py @@ -120,13 +120,15 @@ def assertASTEqual(self, ast1, ast2): self.assertEqual(ast.dump(ast1), ast.dump(ast2)) def check_ast_roundtrip(self, code1, **kwargs): - ast1 = ast.parse(code1, **kwargs) - code2 = ast.unparse(ast1) - ast2 = ast.parse(code2, **kwargs) - 
self.assertASTEqual(ast1, ast2) + with self.subTest(code1=code1, ast_parse_kwargs=kwargs): + ast1 = ast.parse(code1, **kwargs) + code2 = ast.unparse(ast1) + ast2 = ast.parse(code2, **kwargs) + self.assertASTEqual(ast1, ast2) def check_invalid(self, node, raises=ValueError): - self.assertRaises(raises, ast.unparse, node) + with self.subTest(node=node): + self.assertRaises(raises, ast.unparse, node) def get_source(self, code1, code2=None): code2 = code2 or code1 @@ -135,11 +137,13 @@ def get_source(self, code1, code2=None): def check_src_roundtrip(self, code1, code2=None): code1, code2 = self.get_source(code1, code2) - self.assertEqual(code2, code1) + with self.subTest(code1=code1, code2=code2): + self.assertEqual(code2, code1) def check_src_dont_roundtrip(self, code1, code2=None): code1, code2 = self.get_source(code1, code2) - self.assertNotEqual(code2, code1) + with self.subTest(code1=code1, code2=code2): + self.assertNotEqual(code2, code1) class UnparseTestCase(ASTTestCase): # Tests for specific bugs found in earlier versions of unparse From webhook-mailer at python.org Sat May 16 23:19:28 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Sun, 17 May 2020 03:19:28 -0000 Subject: [Python-checkins] bpo-40334: Improvements to error-handling code in the PEG parser (GH-20003) Message-ID: https://github.com/python/cpython/commit/2c8cd06afe8e0abb52367f85978f19b88e2df53e commit: 2c8cd06afe8e0abb52367f85978f19b88e2df53e branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-05-17T04:19:23+01:00 summary: bpo-40334: Improvements to error-handling code in the PEG parser (GH-20003) The following improvements are implemented in this commit: - `p->error_indicator` is set, in case malloc or realloc fail. - Avoid memory leaks in the case that realloc fails. - Call `PyErr_NoMemory()` instead of `PyErr_Format()`, because it requires no memory. 
Co-authored-by: Pablo Galindo files: M Parser/pegen/parse.c M Tools/peg_generator/pegen/c_generator.py diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index f4c5692212768..2a9dad7d1d7ef 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -11139,7 +11139,8 @@ _loop0_1_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -11153,11 +11154,13 @@ _loop0_1_rule(Parser *p) _res = newline_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -11166,8 +11169,9 @@ _loop0_1_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_1"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -11188,7 +11192,8 @@ _loop0_2_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -11202,11 +11207,13 @@ _loop0_2_rule(Parser *p) _res = newline_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = 
PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -11215,8 +11222,9 @@ _loop0_2_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_2"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -11237,7 +11245,8 @@ _loop0_4_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -11259,11 +11268,13 @@ _loop0_4_rule(Parser *p) } if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -11272,8 +11283,9 @@ _loop0_4_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_4"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -11322,7 +11334,8 @@ _loop0_6_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -11344,11 +11357,13 @@ _loop0_6_rule(Parser 
*p) } if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -11357,8 +11372,9 @@ _loop0_6_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_6"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -11407,7 +11423,8 @@ _loop0_8_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -11429,11 +11446,13 @@ _loop0_8_rule(Parser *p) } if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -11442,8 +11461,9 @@ _loop0_8_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_8"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -11492,7 +11512,8 @@ _loop0_10_rule(Parser *p) int _start_mark = p->mark; void **_children = 
PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -11514,11 +11535,13 @@ _loop0_10_rule(Parser *p) } if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -11527,8 +11550,9 @@ _loop0_10_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_10"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -11577,7 +11601,8 @@ _loop1_11_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -11591,11 +11616,13 @@ _loop1_11_rule(Parser *p) _res = statement_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -11608,8 +11635,9 @@ _loop1_11_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, 
"asdl_seq_new _loop1_11"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -11630,7 +11658,8 @@ _loop0_13_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -11652,11 +11681,13 @@ _loop0_13_rule(Parser *p) } if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -11665,8 +11696,9 @@ _loop0_13_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_13"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -12016,7 +12048,8 @@ _loop1_22_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -12030,11 +12063,13 @@ _loop1_22_rule(Parser *p) _res = _tmp_135_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + 
p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -12047,8 +12082,9 @@ _loop1_22_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_22"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -12141,7 +12177,8 @@ _loop0_26_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -12163,11 +12200,13 @@ _loop0_26_rule(Parser *p) } if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -12176,8 +12215,9 @@ _loop0_26_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_26"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -12226,7 +12266,8 @@ _loop0_28_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -12248,11 +12289,13 @@ _loop0_28_rule(Parser *p) } if (_n == _children_capacity) { _children_capacity *= 2; - _children = 
PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -12261,8 +12304,9 @@ _loop0_28_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_28"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -12343,7 +12387,8 @@ _loop0_30_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -12357,11 +12402,13 @@ _loop0_30_rule(Parser *p) _res = _tmp_136_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -12370,8 +12417,9 @@ _loop0_30_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_30"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -12392,7 +12440,8 @@ _loop1_31_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - 
PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -12406,11 +12455,13 @@ _loop1_31_rule(Parser *p) _res = _tmp_137_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -12423,8 +12474,9 @@ _loop1_31_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_31"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -12445,7 +12497,8 @@ _loop0_33_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -12467,11 +12520,13 @@ _loop0_33_rule(Parser *p) } if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -12480,8 +12535,9 @@ _loop0_33_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_33"); PyMem_Free(_children); + 
p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -12562,7 +12618,8 @@ _loop0_36_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -12584,11 +12641,13 @@ _loop0_36_rule(Parser *p) } if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -12597,8 +12656,9 @@ _loop0_36_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_36"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -12679,7 +12739,8 @@ _loop0_39_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -12701,11 +12762,13 @@ _loop0_39_rule(Parser *p) } if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; 
} _children[_n++] = _res; _mark = p->mark; @@ -12714,8 +12777,9 @@ _loop0_39_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_39"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -12764,7 +12828,8 @@ _loop0_41_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -12786,11 +12851,13 @@ _loop0_41_rule(Parser *p) } if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -12799,8 +12866,9 @@ _loop0_41_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_41"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -12849,7 +12917,8 @@ _loop0_43_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -12871,11 +12940,13 @@ _loop0_43_rule(Parser *p) } if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - 
PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -12884,8 +12955,9 @@ _loop0_43_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_43"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -12934,7 +13006,8 @@ _loop0_45_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -12956,11 +13029,13 @@ _loop0_45_rule(Parser *p) } if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -12969,8 +13044,9 @@ _loop0_45_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_45"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -13051,7 +13127,8 @@ _loop1_47_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; 
} ssize_t _children_capacity = 1; @@ -13065,11 +13142,13 @@ _loop1_47_rule(Parser *p) _res = except_block_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -13082,8 +13161,9 @@ _loop1_47_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_47"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -13260,7 +13340,8 @@ _loop0_53_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -13274,11 +13355,13 @@ _loop0_53_rule(Parser *p) _res = param_no_default_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -13287,8 +13370,9 @@ _loop0_53_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_53"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) 
asdl_seq_SET(_seq, i, _children[i]); @@ -13309,7 +13393,8 @@ _loop0_54_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -13323,11 +13408,13 @@ _loop0_54_rule(Parser *p) _res = param_with_default_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -13336,8 +13423,9 @@ _loop0_54_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_54"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -13358,7 +13446,8 @@ _loop0_55_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -13372,11 +13461,13 @@ _loop0_55_rule(Parser *p) _res = param_with_default_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; 
_mark = p->mark; @@ -13385,8 +13476,9 @@ _loop0_55_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_55"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -13407,7 +13499,8 @@ _loop1_56_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -13421,11 +13514,13 @@ _loop1_56_rule(Parser *p) _res = param_no_default_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -13438,8 +13533,9 @@ _loop1_56_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_56"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -13460,7 +13556,8 @@ _loop0_57_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -13474,11 +13571,13 @@ _loop0_57_rule(Parser *p) _res = param_with_default_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void 
*)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -13487,8 +13586,9 @@ _loop0_57_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_57"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -13509,7 +13609,8 @@ _loop1_58_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -13523,11 +13624,13 @@ _loop1_58_rule(Parser *p) _res = param_with_default_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -13540,8 +13643,9 @@ _loop1_58_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_58"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -13562,7 +13666,8 @@ _loop1_59_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + 
p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -13576,11 +13681,13 @@ _loop1_59_rule(Parser *p) _res = param_no_default_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -13593,8 +13700,9 @@ _loop1_59_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_59"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -13615,7 +13723,8 @@ _loop1_60_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -13629,11 +13738,13 @@ _loop1_60_rule(Parser *p) _res = param_no_default_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -13646,8 +13757,9 @@ _loop1_60_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_60"); PyMem_Free(_children); + p->error_indicator = 1; + 
PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -13668,7 +13780,8 @@ _loop0_61_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -13682,11 +13795,13 @@ _loop0_61_rule(Parser *p) _res = param_no_default_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -13695,8 +13810,9 @@ _loop0_61_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_61"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -13717,7 +13833,8 @@ _loop1_62_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -13731,11 +13848,13 @@ _loop1_62_rule(Parser *p) _res = param_with_default_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } 
+ _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -13748,8 +13867,9 @@ _loop1_62_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_62"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -13770,7 +13890,8 @@ _loop0_63_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -13784,11 +13905,13 @@ _loop0_63_rule(Parser *p) _res = param_no_default_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -13797,8 +13920,9 @@ _loop0_63_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_63"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -13819,7 +13943,8 @@ _loop1_64_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -13833,11 +13958,13 @@ _loop1_64_rule(Parser *p) _res = param_with_default_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = 
PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -13850,8 +13977,9 @@ _loop1_64_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_64"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -13872,7 +14000,8 @@ _loop0_65_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -13886,11 +14015,13 @@ _loop0_65_rule(Parser *p) _res = param_maybe_default_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -13899,8 +14030,9 @@ _loop0_65_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_65"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -13921,7 +14053,8 @@ _loop1_66_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - 
PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -13935,11 +14068,13 @@ _loop1_66_rule(Parser *p) _res = param_maybe_default_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -13952,8 +14087,9 @@ _loop1_66_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_66"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -13974,7 +14110,8 @@ _loop1_67_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -13988,11 +14125,13 @@ _loop1_67_rule(Parser *p) _res = _tmp_138_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -14005,8 +14144,9 @@ _loop1_67_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_67"); 
PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -14062,7 +14202,8 @@ _loop0_70_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -14084,11 +14225,13 @@ _loop0_70_rule(Parser *p) } if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -14097,8 +14240,9 @@ _loop0_70_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_70"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -14147,7 +14291,8 @@ _loop1_71_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -14161,11 +14306,13 @@ _loop1_71_rule(Parser *p) _res = _tmp_139_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); 
return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -14178,8 +14325,9 @@ _loop1_71_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_71"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -14200,7 +14348,8 @@ _loop0_73_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -14222,11 +14371,13 @@ _loop0_73_rule(Parser *p) } if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -14235,8 +14386,9 @@ _loop0_73_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_73"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -14285,7 +14437,8 @@ _loop1_74_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -14299,11 +14452,13 @@ _loop1_74_rule(Parser *p) _res = _tmp_140_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, 
_children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -14316,8 +14471,9 @@ _loop1_74_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_74"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -14338,7 +14494,8 @@ _loop0_75_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -14352,11 +14509,13 @@ _loop0_75_rule(Parser *p) _res = lambda_param_no_default_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -14365,8 +14524,9 @@ _loop0_75_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_75"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -14387,7 +14547,8 @@ _loop0_76_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - 
PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -14401,11 +14562,13 @@ _loop0_76_rule(Parser *p) _res = lambda_param_with_default_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -14414,8 +14577,9 @@ _loop0_76_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_76"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -14436,7 +14600,8 @@ _loop0_77_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -14450,11 +14615,13 @@ _loop0_77_rule(Parser *p) _res = lambda_param_with_default_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -14463,8 +14630,9 @@ _loop0_77_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, 
"asdl_seq_new _loop0_77"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -14485,7 +14653,8 @@ _loop1_78_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -14499,11 +14668,13 @@ _loop1_78_rule(Parser *p) _res = lambda_param_no_default_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -14516,8 +14687,9 @@ _loop1_78_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_78"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -14538,7 +14710,8 @@ _loop0_79_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -14552,11 +14725,13 @@ _loop0_79_rule(Parser *p) _res = lambda_param_with_default_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, 
_children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -14565,8 +14740,9 @@ _loop0_79_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_79"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -14587,7 +14763,8 @@ _loop1_80_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -14601,11 +14778,13 @@ _loop1_80_rule(Parser *p) _res = lambda_param_with_default_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -14618,8 +14797,9 @@ _loop1_80_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_80"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -14640,7 +14820,8 @@ _loop1_81_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -14654,11 +14835,13 @@ 
_loop1_81_rule(Parser *p) _res = lambda_param_no_default_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -14671,8 +14854,9 @@ _loop1_81_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_81"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -14693,7 +14877,8 @@ _loop1_82_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -14707,11 +14892,13 @@ _loop1_82_rule(Parser *p) _res = lambda_param_no_default_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -14724,8 +14911,9 @@ _loop1_82_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_82"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -14746,7 
+14934,8 @@ _loop0_83_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -14760,11 +14949,13 @@ _loop0_83_rule(Parser *p) _res = lambda_param_no_default_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -14773,8 +14964,9 @@ _loop0_83_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_83"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -14795,7 +14987,8 @@ _loop1_84_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -14809,11 +15002,13 @@ _loop1_84_rule(Parser *p) _res = lambda_param_with_default_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -14826,8 +15021,9 @@ 
_loop1_84_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_84"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -14848,7 +15044,8 @@ _loop0_85_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -14862,11 +15059,13 @@ _loop0_85_rule(Parser *p) _res = lambda_param_no_default_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -14875,8 +15074,9 @@ _loop0_85_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_85"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -14897,7 +15097,8 @@ _loop1_86_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -14911,11 +15112,13 @@ _loop1_86_rule(Parser *p) _res = lambda_param_with_default_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - 
PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -14928,8 +15131,9 @@ _loop1_86_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_86"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -14950,7 +15154,8 @@ _loop0_87_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -14964,11 +15169,13 @@ _loop0_87_rule(Parser *p) _res = lambda_param_maybe_default_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -14977,8 +15184,9 @@ _loop0_87_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_87"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -14999,7 +15207,8 @@ _loop1_88_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator 
= 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -15013,11 +15222,13 @@ _loop1_88_rule(Parser *p) _res = lambda_param_maybe_default_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -15030,8 +15241,9 @@ _loop1_88_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_88"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -15052,7 +15264,8 @@ _loop1_89_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -15066,11 +15279,13 @@ _loop1_89_rule(Parser *p) _res = _tmp_141_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -15083,8 +15298,9 @@ _loop1_89_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_89"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return 
NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -15105,7 +15321,8 @@ _loop1_90_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -15119,11 +15336,13 @@ _loop1_90_rule(Parser *p) _res = _tmp_142_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -15136,8 +15355,9 @@ _loop1_90_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_90"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -15158,7 +15378,8 @@ _loop1_91_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -15172,11 +15393,13 @@ _loop1_91_rule(Parser *p) _res = compare_op_bitwise_or_pair_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = 
_new_children; } _children[_n++] = _res; _mark = p->mark; @@ -15189,8 +15412,9 @@ _loop1_91_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_91"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -15240,7 +15464,8 @@ _loop0_94_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -15262,11 +15487,13 @@ _loop0_94_rule(Parser *p) } if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -15275,8 +15502,9 @@ _loop0_94_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_94"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -15498,7 +15726,8 @@ _loop1_99_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -15512,11 +15741,13 @@ _loop1_99_rule(Parser *p) _res = string_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void 
*)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -15529,8 +15760,9 @@ _loop1_99_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_99"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -15622,7 +15854,8 @@ _loop0_103_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -15644,11 +15877,13 @@ _loop0_103_rule(Parser *p) } if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -15657,8 +15892,9 @@ _loop0_103_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_103"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -15707,7 +15943,8 @@ _loop1_104_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; 
+ PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -15721,11 +15958,13 @@ _loop1_104_rule(Parser *p) _res = for_if_clause_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -15738,8 +15977,9 @@ _loop1_104_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_104"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -15760,7 +16000,8 @@ _loop0_105_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -15774,11 +16015,13 @@ _loop0_105_rule(Parser *p) _res = _tmp_143_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -15787,8 +16030,9 @@ _loop0_105_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_105"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } 
for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -15809,7 +16053,8 @@ _loop0_106_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -15823,11 +16068,13 @@ _loop0_106_rule(Parser *p) _res = _tmp_144_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -15836,8 +16083,9 @@ _loop0_106_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_106"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -15922,7 +16170,8 @@ _loop0_110_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -15944,11 +16193,13 @@ _loop0_110_rule(Parser *p) } if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = 
p->mark; @@ -15957,8 +16208,9 @@ _loop0_110_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_110"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -16007,7 +16259,8 @@ _loop0_112_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -16029,11 +16282,13 @@ _loop0_112_rule(Parser *p) } if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -16042,8 +16297,9 @@ _loop0_112_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_112"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -16092,7 +16348,8 @@ _loop0_114_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -16114,11 +16371,13 @@ _loop0_114_rule(Parser *p) } if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, 
"realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -16127,8 +16386,9 @@ _loop0_114_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_114"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -16177,7 +16437,8 @@ _loop0_116_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -16199,11 +16460,13 @@ _loop0_116_rule(Parser *p) } if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -16212,8 +16475,9 @@ _loop0_116_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_116"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -16262,7 +16526,8 @@ _loop0_117_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t 
_children_capacity = 1; @@ -16276,11 +16541,13 @@ _loop0_117_rule(Parser *p) _res = _tmp_145_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -16289,8 +16556,9 @@ _loop0_117_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_117"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -16311,7 +16579,8 @@ _loop0_119_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -16333,11 +16602,13 @@ _loop0_119_rule(Parser *p) } if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -16346,8 +16617,9 @@ _loop0_119_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_119"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ 
-16423,7 +16695,8 @@ _loop0_122_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -16445,11 +16718,13 @@ _loop0_122_rule(Parser *p) } if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -16458,8 +16733,9 @@ _loop0_122_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_122"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -16508,7 +16784,8 @@ _loop0_124_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -16530,11 +16807,13 @@ _loop0_124_rule(Parser *p) } if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -16543,8 +16822,9 @@ _loop0_124_rule(Parser *p) } asdl_seq *_seq = 
_Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_124"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -16632,7 +16912,8 @@ _loop0_126_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -16646,11 +16927,13 @@ _loop0_126_rule(Parser *p) _res = star_named_expressions_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -16659,8 +16942,9 @@ _loop0_126_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_126"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -16828,7 +17112,8 @@ _loop0_131_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -16842,11 +17127,13 @@ _loop0_131_rule(Parser *p) _res = param_no_default_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + 
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -16855,8 +17142,9 @@ _loop0_131_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_131"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); @@ -17354,7 +17642,8 @@ _loop1_146_rule(Parser *p) int _start_mark = p->mark; void **_children = PyMem_Malloc(sizeof(void *)); if (!_children) { - PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } ssize_t _children_capacity = 1; @@ -17368,11 +17657,13 @@ _loop1_146_rule(Parser *p) _res = param_with_default_var; if (_n == _children_capacity) { _children_capacity *= 2; - _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_children) { - PyErr_Format(PyExc_MemoryError, "realloc None"); + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } + _children = _new_children; } _children[_n++] = _res; _mark = p->mark; @@ -17385,8 +17676,9 @@ _loop1_146_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_146"); PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); diff --git a/Tools/peg_generator/pegen/c_generator.py b/Tools/peg_generator/pegen/c_generator.py index 6c9aa3f2ba786..8f9972bb41a52 100644 --- a/Tools/peg_generator/pegen/c_generator.py +++ b/Tools/peg_generator/pegen/c_generator.py @@ -320,24 +320,21 @@ def 
call_with_errorcheck_goto(self, call_text: str, goto_target: str) -> None: def out_of_memory_return( self, expr: str, - returnval: str, - message: str = "Parser out of memory", cleanup_code: Optional[str] = None, ) -> None: self.print(f"if ({expr}) {{") with self.indent(): - self.print(f'PyErr_Format(PyExc_MemoryError, "{message}");') if cleanup_code is not None: self.print(cleanup_code) - self.print(f"return {returnval};") + self.print("p->error_indicator = 1;") + self.print("PyErr_NoMemory();"); + self.print("return NULL;") self.print(f"}}") - def out_of_memory_goto( - self, expr: str, goto_target: str, message: str = "Parser out of memory" - ) -> None: + def out_of_memory_goto(self, expr: str, goto_target: str) -> None: self.print(f"if ({expr}) {{") with self.indent(): - self.print(f'PyErr_Format(PyExc_MemoryError, "{message}");') + self.print("PyErr_NoMemory();") self.print(f"goto {goto_target};") self.print(f"}}") @@ -487,7 +484,7 @@ def _handle_default_rule_body(self, node: Rule, rhs: Rhs, result_type: str) -> N rhs, is_loop=False, is_gather=node.is_gather(), - rulename=node.name if memoize else None, + rulename=node.name, ) if self.debug: self.print('fprintf(stderr, "Fail at %d: {node.name}\\n", p->mark);') @@ -515,7 +512,7 @@ def _handle_loop_rule_body(self, node: Rule, rhs: Rhs) -> None: self.print("int _mark = p->mark;") self.print("int _start_mark = p->mark;") self.print("void **_children = PyMem_Malloc(sizeof(void *));") - self.out_of_memory_return(f"!_children", "NULL") + self.out_of_memory_return(f"!_children") self.print("ssize_t _children_capacity = 1;") self.print("ssize_t _n = 0;") if any(alt.action and "EXTRA" in alt.action for alt in rhs.alts): @@ -524,7 +521,7 @@ def _handle_loop_rule_body(self, node: Rule, rhs: Rhs) -> None: rhs, is_loop=True, is_gather=node.is_gather(), - rulename=node.name if memoize else None, + rulename=node.name, ) if is_repeat1: self.print("if (_n == 0 || p->error_indicator) {") @@ -533,12 +530,7 @@ def 
_handle_loop_rule_body(self, node: Rule, rhs: Rhs) -> None: self.print("return NULL;") self.print("}") self.print("asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena);") - self.out_of_memory_return( - "!_seq", - "NULL", - message=f"asdl_seq_new {node.name}", - cleanup_code="PyMem_Free(_children);", - ) + self.out_of_memory_return(f"!_seq", cleanup_code="PyMem_Free(_children);") self.print("for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]);") self.print("PyMem_Free(_children);") if node.name: @@ -682,10 +674,9 @@ def handle_alt_loop(self, node: Alt, is_gather: bool, rulename: Optional[str]) - self.print("if (_n == _children_capacity) {") with self.indent(): self.print("_children_capacity *= 2;") - self.print( - "_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));" - ) - self.out_of_memory_return(f"!_children", "NULL", message=f"realloc {rulename}") + self.print("void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));") + self.out_of_memory_return(f"!_new_children") + self.print("_children = _new_children;") self.print("}") self.print("_children[_n++] = _res;") self.print("_mark = p->mark;") From webhook-mailer at python.org Sun May 17 00:15:00 2020 From: webhook-mailer at python.org (Chris Jerdonek) Date: Sun, 17 May 2020 04:15:00 -0000 Subject: [Python-checkins] bpo-29587: Add another test for the gen.throw() fix. (GH-19859) Message-ID: https://github.com/python/cpython/commit/d7184d3dbd249444ec3961641dc08a9ad3c1ac34 commit: d7184d3dbd249444ec3961641dc08a9ad3c1ac34 branch: master author: Chris Jerdonek committer: GitHub date: 2020-05-16T21:14:48-07:00 summary: bpo-29587: Add another test for the gen.throw() fix. 
(GH-19859) files: M Lib/test/test_generators.py diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py index 348ae15aa6532..87cc2dfc8c679 100644 --- a/Lib/test/test_generators.py +++ b/Lib/test/test_generators.py @@ -332,6 +332,28 @@ def f(): context = cm.exception.__context__ self.assertEqual((type(context), context.args), (KeyError, ('a',))) + def test_exception_context_with_yield_inside_generator(self): + # Check that the context is also available from inside the generator + # with yield, as opposed to outside. + def f(): + try: + raise KeyError('a') + except Exception: + try: + yield + except Exception as exc: + self.assertEqual(type(exc), ValueError) + context = exc.__context__ + self.assertEqual((type(context), context.args), + (KeyError, ('a',))) + yield 'b' + + gen = f() + gen.send(None) + actual = gen.throw(ValueError) + # This ensures that the assertions inside were executed. + self.assertEqual(actual, 'b') + def test_exception_context_with_yield_from(self): def f(): yield From webhook-mailer at python.org Sun May 17 02:32:53 2020 From: webhook-mailer at python.org (Erlend Egeberg Aasland) Date: Sun, 17 May 2020 06:32:53 -0000 Subject: [Python-checkins] bpo-35569: Expose RFC 3542 IPv6 socket options on macOS (GH-19526) Message-ID: https://github.com/python/cpython/commit/9a45bfe6f4aedd2a9d94cb12aa276057b15d8b63 commit: 9a45bfe6f4aedd2a9d94cb12aa276057b15d8b63 branch: master author: Erlend Egeberg Aasland committer: GitHub date: 2020-05-17T02:32:46-04:00 summary: bpo-35569: Expose RFC 3542 IPv6 socket options on macOS (GH-19526) files: A Misc/NEWS.d/next/macOS/2020-04-15-00-02-47.bpo-35569.02_1MV.rst M Lib/test/test_socket.py M setup.py diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py index 4a436cf3c143a..aefba4f397b10 100755 --- a/Lib/test/test_socket.py +++ b/Lib/test/test_socket.py @@ -959,6 +959,37 @@ def testWindowsSpecificConstants(self): socket.IPPROTO_L2TP socket.IPPROTO_SCTP + @unittest.skipUnless(sys.platform == 
'darwin', 'macOS specific test') + @unittest.skipUnless(socket_helper.IPV6_ENABLED, 'IPv6 required for this test') + def test3542SocketOptions(self): + # Ref. issue #35569 and https://tools.ietf.org/html/rfc3542 + opts = { + 'IPV6_CHECKSUM', + 'IPV6_DONTFRAG', + 'IPV6_DSTOPTS', + 'IPV6_HOPLIMIT', + 'IPV6_HOPOPTS', + 'IPV6_NEXTHOP', + 'IPV6_PATHMTU', + 'IPV6_PKTINFO', + 'IPV6_RECVDSTOPTS', + 'IPV6_RECVHOPLIMIT', + 'IPV6_RECVHOPOPTS', + 'IPV6_RECVPATHMTU', + 'IPV6_RECVPKTINFO', + 'IPV6_RECVRTHDR', + 'IPV6_RECVTCLASS', + 'IPV6_RTHDR', + 'IPV6_RTHDRDSTOPTS', + 'IPV6_RTHDR_TYPE_0', + 'IPV6_TCLASS', + 'IPV6_USE_MIN_MTU', + } + for opt in opts: + self.assertTrue( + hasattr(socket, opt), f"Missing RFC3542 socket option '{opt}'" + ) + def testHostnameRes(self): # Testing hostname resolution mechanisms hostname = socket.gethostname() diff --git a/Misc/NEWS.d/next/macOS/2020-04-15-00-02-47.bpo-35569.02_1MV.rst b/Misc/NEWS.d/next/macOS/2020-04-15-00-02-47.bpo-35569.02_1MV.rst new file mode 100644 index 0000000000000..ed48efd7f5c49 --- /dev/null +++ b/Misc/NEWS.d/next/macOS/2020-04-15-00-02-47.bpo-35569.02_1MV.rst @@ -0,0 +1 @@ +Expose RFC 3542 IPv6 socket options. diff --git a/setup.py b/setup.py index 68fc3120cc317..0f92a9c010810 100644 --- a/setup.py +++ b/setup.py @@ -1116,8 +1116,12 @@ def detect_crypt(self): def detect_socket(self): # socket(2) if not VXWORKS: - self.add(Extension('_socket', ['socketmodule.c'], - depends=['socketmodule.h'])) + kwargs = {'depends': ['socketmodule.h']} + if MACOS: + # Issue #35569: Expose RFC 3542 socket options. 
+ kwargs['extra_compile_args'] = ['-D__APPLE_USE_RFC_3542'] + + self.add(Extension('_socket', ['socketmodule.c'], **kwargs)) elif self.compiler.find_library_file(self.lib_dirs, 'net'): libs = ['net'] self.add(Extension('_socket', ['socketmodule.c'], From webhook-mailer at python.org Sun May 17 02:57:32 2020 From: webhook-mailer at python.org (Ned Deily) Date: Sun, 17 May 2020 06:57:32 -0000 Subject: [Python-checkins] bpo-35569: add Erlend to Misc/ACKS (GH-20146) Message-ID: https://github.com/python/cpython/commit/fa098b6bc8662cceb944ad5a4a3e5eb63d3cb517 commit: fa098b6bc8662cceb944ad5a4a3e5eb63d3cb517 branch: master author: Ned Deily committer: GitHub date: 2020-05-17T02:57:25-04:00 summary: bpo-35569: add Erlend to Misc/ACKS (GH-20146) files: M Misc/ACKS diff --git a/Misc/ACKS b/Misc/ACKS index a9345e097741f..6511383fa25d7 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -12,6 +12,7 @@ PS: In the standard Python distribution, this file is encoded in UTF-8 and the list is in rough alphabetical order by last names. Aahz +Erlend Egeberg Aasland Edison Abahurire Michael Abbott Rajiv Abraham From webhook-mailer at python.org Sun May 17 06:52:53 2020 From: webhook-mailer at python.org (Zackery Spytz) Date: Sun, 17 May 2020 10:52:53 -0000 Subject: [Python-checkins] bpo-39976: Add **other_popen_kwargs to subprocess docs (GH-20145) Message-ID: https://github.com/python/cpython/commit/46545000c2a30b46aed717b546bc09e5bae7148f commit: 46545000c2a30b46aed717b546bc09e5bae7148f branch: master author: Zackery Spytz committer: GitHub date: 2020-05-17T03:52:47-07:00 summary: bpo-39976: Add **other_popen_kwargs to subprocess docs (GH-20145) Patch by Zackery Spytz. files: M Doc/library/subprocess.rst diff --git a/Doc/library/subprocess.rst b/Doc/library/subprocess.rst index 24497a2edd357..e5dbfe42809fa 100644 --- a/Doc/library/subprocess.rst +++ b/Doc/library/subprocess.rst @@ -40,7 +40,7 @@ compatibility with older versions, see the :ref:`call-function-trio` section. .. 
function:: run(args, *, stdin=None, input=None, stdout=None, stderr=None,\ capture_output=False, shell=False, cwd=None, timeout=None, \ check=False, encoding=None, errors=None, text=None, env=None, \ - universal_newlines=None) + universal_newlines=None, **other_popen_kwargs) Run the command described by *args*. Wait for command to complete, then return a :class:`CompletedProcess` instance. @@ -1085,7 +1085,8 @@ Prior to Python 3.5, these three functions comprised the high level API to subprocess. You can now use :func:`run` in many cases, but lots of existing code calls these functions. -.. function:: call(args, *, stdin=None, stdout=None, stderr=None, shell=False, cwd=None, timeout=None) +.. function:: call(args, *, stdin=None, stdout=None, stderr=None, \ + shell=False, cwd=None, timeout=None, **other_popen_kwargs) Run the command described by *args*. Wait for command to complete, then return the :attr:`~Popen.returncode` attribute. @@ -1111,7 +1112,9 @@ calls these functions. .. versionchanged:: 3.3 *timeout* was added. -.. function:: check_call(args, *, stdin=None, stdout=None, stderr=None, shell=False, cwd=None, timeout=None) +.. function:: check_call(args, *, stdin=None, stdout=None, stderr=None, \ + shell=False, cwd=None, timeout=None, \ + **other_popen_kwargs) Run command with arguments. Wait for command to complete. If the return code was zero then return, otherwise raise :exc:`CalledProcessError`. The @@ -1142,7 +1145,8 @@ calls these functions. .. function:: check_output(args, *, stdin=None, stderr=None, shell=False, \ cwd=None, encoding=None, errors=None, \ - universal_newlines=None, timeout=None, text=None) + universal_newlines=None, timeout=None, text=None, \ + **other_popen_kwargs) Run command with arguments and return its output. 
From webhook-mailer at python.org Sun May 17 07:49:14 2020 From: webhook-mailer at python.org (Christian Heimes) Date: Sun, 17 May 2020 11:49:14 -0000 Subject: [Python-checkins] bpo-40645: Implement HMAC in C (GH-20129) Message-ID: https://github.com/python/cpython/commit/54f2898fe7e4ca1f239e96284af3cc5b34d2ae02 commit: 54f2898fe7e4ca1f239e96284af3cc5b34d2ae02 branch: master author: Christian Heimes committer: GitHub date: 2020-05-17T13:49:10+02:00 summary: bpo-40645: Implement HMAC in C (GH-20129) The internal module ``_hashlib`` wraps and exposes OpenSSL's HMAC API. The new code will be used in Python 3.10 after the internal implementation details of the pure Python HMAC module are no longer part of the public API. The code is based on a patch by Petr Viktorin for RHEL and Python 3.6. Co-Authored-By: Petr Viktorin files: A Misc/NEWS.d/next/Library/2020-05-16-17-05-02.bpo-40645.wYSkjT.rst M Lib/test/test_hmac.py M Modules/_hashopenssl.c M Modules/clinic/_hashopenssl.c.h diff --git a/Lib/test/test_hmac.py b/Lib/test/test_hmac.py index 1f3ec4cb9172d..7a52e39c5d471 100644 --- a/Lib/test/test_hmac.py +++ b/Lib/test/test_hmac.py @@ -8,6 +8,13 @@ from test.support import hashlib_helper +try: + from _hashlib import HMAC as C_HMAC + from _hashlib import hmac_new as c_hmac_new +except ImportError: + C_HMAC = None + c_hmac_new = None + def ignore_warning(func): @functools.wraps(func) @@ -21,34 +28,91 @@ def wrapper(*args, **kwargs): class TestVectorsTestCase(unittest.TestCase): - @hashlib_helper.requires_hashdigest('md5', openssl=True) - def test_md5_vectors(self): - # Test the HMAC module against test vectors from the RFC. 
+ def asssert_hmac( + self, key, data, digest, hashfunc, hashname, digest_size, block_size + ): + h = hmac.HMAC(key, data, digestmod=hashfunc) + self.assertEqual(h.hexdigest().upper(), digest.upper()) + self.assertEqual(h.digest(), binascii.unhexlify(digest)) + self.assertEqual(h.name, f"hmac-{hashname}") + self.assertEqual(h.digest_size, digest_size) + self.assertEqual(h.block_size, block_size) + + h = hmac.HMAC(key, data, digestmod=hashname) + self.assertEqual(h.hexdigest().upper(), digest.upper()) + self.assertEqual(h.digest(), binascii.unhexlify(digest)) + self.assertEqual(h.name, f"hmac-{hashname}") + self.assertEqual(h.digest_size, digest_size) + self.assertEqual(h.block_size, block_size) + + h = hmac.HMAC(key, digestmod=hashname) + h2 = h.copy() + h2.update(b"test update") + h.update(data) + self.assertEqual(h.hexdigest().upper(), digest.upper()) + + h = hmac.new(key, data, digestmod=hashname) + self.assertEqual(h.hexdigest().upper(), digest.upper()) + self.assertEqual(h.digest(), binascii.unhexlify(digest)) + self.assertEqual(h.name, f"hmac-{hashname}") + self.assertEqual(h.digest_size, digest_size) + self.assertEqual(h.block_size, block_size) + + h = hmac.new(key, None, digestmod=hashname) + h.update(data) + self.assertEqual(h.hexdigest().upper(), digest.upper()) + + h = hmac.new(key, digestmod=hashname) + h.update(data) + self.assertEqual(h.hexdigest().upper(), digest.upper()) + + h = hmac.new(key, data, digestmod=hashfunc) + self.assertEqual(h.hexdigest().upper(), digest.upper()) + + self.assertEqual( + hmac.digest(key, data, digest=hashname), + binascii.unhexlify(digest) + ) + self.assertEqual( + hmac.digest(key, data, digest=hashfunc), + binascii.unhexlify(digest) + ) + with unittest.mock.patch('hmac._openssl_md_meths', {}): + self.assertEqual( + hmac.digest(key, data, digest=hashname), + binascii.unhexlify(digest) + ) + self.assertEqual( + hmac.digest(key, data, digest=hashfunc), + binascii.unhexlify(digest) + ) - def md5test(key, data, digest): - h = 
hmac.HMAC(key, data, digestmod=hashlib.md5) + if c_hmac_new is not None: + h = c_hmac_new(key, data, digestmod=hashname) self.assertEqual(h.hexdigest().upper(), digest.upper()) self.assertEqual(h.digest(), binascii.unhexlify(digest)) - self.assertEqual(h.name, "hmac-md5") - self.assertEqual(h.digest_size, 16) - self.assertEqual(h.block_size, 64) + self.assertEqual(h.name, f"hmac-{hashname}") + self.assertEqual(h.digest_size, digest_size) + self.assertEqual(h.block_size, block_size) - h = hmac.HMAC(key, data, digestmod='md5') + h = c_hmac_new(key, digestmod=hashname) + h2 = h.copy() + h2.update(b"test update") + h.update(data) self.assertEqual(h.hexdigest().upper(), digest.upper()) - self.assertEqual(h.digest(), binascii.unhexlify(digest)) - self.assertEqual(h.name, "hmac-md5") - self.assertEqual(h.digest_size, 16) - self.assertEqual(h.block_size, 64) - self.assertEqual( - hmac.digest(key, data, digest='md5'), - binascii.unhexlify(digest) + @hashlib_helper.requires_hashdigest('md5', openssl=True) + def test_md5_vectors(self): + # Test the HMAC module against test vectors from the RFC. 
+ + def md5test(key, data, digest): + self.asssert_hmac( + key, data, digest, + hashfunc=hashlib.md5, + hashname="md5", + digest_size=16, + block_size=64 ) - with unittest.mock.patch('hmac._openssl_md_meths', {}): - self.assertEqual( - hmac.digest(key, data, digest='md5'), - binascii.unhexlify(digest) - ) md5test(b"\x0b" * 16, b"Hi There", @@ -82,26 +146,14 @@ def md5test(key, data, digest): @hashlib_helper.requires_hashdigest('sha1', openssl=True) def test_sha_vectors(self): def shatest(key, data, digest): - h = hmac.HMAC(key, data, digestmod=hashlib.sha1) - self.assertEqual(h.hexdigest().upper(), digest.upper()) - self.assertEqual(h.digest(), binascii.unhexlify(digest)) - self.assertEqual(h.name, "hmac-sha1") - self.assertEqual(h.digest_size, 20) - self.assertEqual(h.block_size, 64) - - h = hmac.HMAC(key, data, digestmod='sha1') - self.assertEqual(h.hexdigest().upper(), digest.upper()) - self.assertEqual(h.digest(), binascii.unhexlify(digest)) - self.assertEqual(h.name, "hmac-sha1") - self.assertEqual(h.digest_size, 20) - self.assertEqual(h.block_size, 64) - - self.assertEqual( - hmac.digest(key, data, digest='sha1'), - binascii.unhexlify(digest) + self.asssert_hmac( + key, data, digest, + hashfunc=hashlib.sha1, + hashname="sha1", + digest_size=20, + block_size=64 ) - shatest(b"\x0b" * 20, b"Hi There", "b617318655057264e28bc0b6fb378c8ef146be00") @@ -133,37 +185,15 @@ def shatest(key, data, digest): def _rfc4231_test_cases(self, hashfunc, hash_name, digest_size, block_size): def hmactest(key, data, hexdigests): - hmac_name = "hmac-" + hash_name - h = hmac.HMAC(key, data, digestmod=hashfunc) - self.assertEqual(h.hexdigest().lower(), hexdigests[hashfunc]) - self.assertEqual(h.name, hmac_name) - self.assertEqual(h.digest_size, digest_size) - self.assertEqual(h.block_size, block_size) - - h = hmac.HMAC(key, data, digestmod=hash_name) - self.assertEqual(h.hexdigest().lower(), hexdigests[hashfunc]) - self.assertEqual(h.name, hmac_name) - self.assertEqual(h.digest_size, 
digest_size) - self.assertEqual(h.block_size, block_size) - - self.assertEqual( - hmac.digest(key, data, digest=hashfunc), - binascii.unhexlify(hexdigests[hashfunc]) + digest = hexdigests[hashfunc] + + self.asssert_hmac( + key, data, digest, + hashfunc=hashfunc, + hashname=hash_name, + digest_size=digest_size, + block_size=block_size ) - self.assertEqual( - hmac.digest(key, data, digest=hash_name), - binascii.unhexlify(hexdigests[hashfunc]) - ) - - with unittest.mock.patch('hmac._openssl_md_meths', {}): - self.assertEqual( - hmac.digest(key, data, digest=hashfunc), - binascii.unhexlify(hexdigests[hashfunc]) - ) - self.assertEqual( - hmac.digest(key, data, digest=hash_name), - binascii.unhexlify(hexdigests[hashfunc]) - ) # 4.2. Test Case 1 hmactest(key = b'\x0b'*20, @@ -385,6 +415,14 @@ def test_withmodule(self): except Exception: self.fail("Constructor call with hashlib.sha256 raised exception.") + @unittest.skipUnless(C_HMAC is not None, 'need _hashlib') + def test_internal_types(self): + # internal types like _hashlib.C_HMAC are not constructable + with self.assertRaisesRegex( + TypeError, "cannot create 'HMAC' instance" + ): + C_HMAC() + class SanityTestCase(unittest.TestCase): @@ -395,9 +433,9 @@ def test_exercise_all_methods(self): try: h = hmac.HMAC(b"my secret key", digestmod="sha256") h.update(b"compute the hash of this text!") - dig = h.digest() - dig = h.hexdigest() - h2 = h.copy() + h.digest() + h.hexdigest() + h.copy() except Exception: self.fail("Exception raised during normal usage of HMAC class.") @@ -450,6 +488,21 @@ def test_equality(self): self.assertEqual(h1.hexdigest(), h2.hexdigest(), "Hexdigest of copy doesn't match original hexdigest.") + @hashlib_helper.requires_hashdigest('sha256') + def test_equality_new(self): + # Testing if the copy has the same digests with hmac.new(). + h1 = hmac.new(b"key", digestmod="sha256") + h1.update(b"some random text") + h2 = h1.copy() + self.assertTrue( + id(h1) != id(h2), "No real copy of the HMAC instance." 
+ ) + self.assertEqual(h1.digest(), h2.digest(), + "Digest of copy doesn't match original digest.") + self.assertEqual(h1.hexdigest(), h2.hexdigest(), + "Hexdigest of copy doesn't match original hexdigest.") + + class CompareDigestTestCase(unittest.TestCase): def test_compare_digest(self): diff --git a/Misc/NEWS.d/next/Library/2020-05-16-17-05-02.bpo-40645.wYSkjT.rst b/Misc/NEWS.d/next/Library/2020-05-16-17-05-02.bpo-40645.wYSkjT.rst new file mode 100644 index 0000000000000..bb7eacdc5aa78 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-16-17-05-02.bpo-40645.wYSkjT.rst @@ -0,0 +1 @@ +The internal module ``_hashlib`` wraps and exposes OpenSSL's HMAC API. The new code will be used in Python 3.10 after the internal implementation details of the pure Python HMAC module are no longer part of the public API. diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c index 936b515addbc1..292e92a375268 100644 --- a/Modules/_hashopenssl.c +++ b/Modules/_hashopenssl.c @@ -35,6 +35,32 @@ /* OpenSSL < 1.1.0 */ #define EVP_MD_CTX_new EVP_MD_CTX_create #define EVP_MD_CTX_free EVP_MD_CTX_destroy + +HMAC_CTX * +HMAC_CTX_new(void) +{ + HMAC_CTX *ctx = OPENSSL_malloc(sizeof(HMAC_CTX)); + if (ctx != NULL) { + memset(ctx, 0, sizeof(HMAC_CTX)); + HMAC_CTX_init(ctx); + } + return ctx; +} + +void +HMAC_CTX_free(HMAC_CTX *ctx) +{ + if (ctx != NULL) { + HMAC_CTX_cleanup(ctx); + OPENSSL_free(ctx); + } +} + +const EVP_MD * +HMAC_CTX_get_md(const HMAC_CTX *ctx) +{ + return ctx->md; +} #endif #define MUNCH_SIZE INT_MAX @@ -55,6 +81,7 @@ static PyModuleDef _hashlibmodule; typedef struct { PyTypeObject *EVPtype; + PyTypeObject *HMACtype; #ifdef PY_OPENSSL_HAS_SHAKE PyTypeObject *EVPXOFtype; #endif @@ -77,14 +104,20 @@ typedef struct { PyThread_type_lock lock; /* OpenSSL context lock */ } EVPobject; +typedef struct { + PyObject_HEAD + HMAC_CTX *ctx; /* OpenSSL hmac context */ + PyThread_type_lock lock; /* HMAC context lock */ +} HMACobject; #include "clinic/_hashopenssl.c.h" /*[clinic 
input] module _hashlib class _hashlib.HASH "EVPobject *" "((_hashlibstate *)PyModule_GetState(module))->EVPtype" class _hashlib.HASHXOF "EVPobject *" "((_hashlibstate *)PyModule_GetState(module))->EVPXOFtype" +class _hashlib.HMAC "HMACobject *" "((_hashlibstate *)PyModule_GetState(module))->HMACtype" [clinic start generated code]*/ -/*[clinic end generated code: output=da39a3ee5e6b4b0d input=813acc7b2d8f322c]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=7df1bcf6f75cb8ef]*/ /* LCOV_EXCL_START */ @@ -1091,7 +1124,7 @@ pbkdf2_hmac_impl(PyObject *module, const char *hash_name, int retval; const EVP_MD *digest; - digest = EVP_get_digestbyname(hash_name); + digest = py_digest_by_name(hash_name); if (digest == NULL) { PyErr_SetString(PyExc_ValueError, "unsupported hash type"); goto end; @@ -1293,7 +1326,7 @@ _hashlib_scrypt_impl(PyObject *module, Py_buffer *password, Py_buffer *salt, */ /*[clinic input] -_hashlib.hmac_digest +_hashlib.hmac_digest as _hashlib_hmac_singleshot key: Py_buffer msg: Py_buffer @@ -1303,16 +1336,16 @@ Single-shot HMAC. 
[clinic start generated code]*/ static PyObject * -_hashlib_hmac_digest_impl(PyObject *module, Py_buffer *key, Py_buffer *msg, - const char *digest) -/*[clinic end generated code: output=75630e684cdd8762 input=562d2f4249511bd3]*/ +_hashlib_hmac_singleshot_impl(PyObject *module, Py_buffer *key, + Py_buffer *msg, const char *digest) +/*[clinic end generated code: output=15658ede5ab98185 input=019dffc571909a46]*/ { unsigned char md[EVP_MAX_MD_SIZE] = {0}; unsigned int md_len = 0; unsigned char *result; const EVP_MD *evp; - evp = EVP_get_digestbyname(digest); + evp = py_digest_by_name(digest); if (evp == NULL) { PyErr_SetString(PyExc_ValueError, "unsupported hash type"); return NULL; @@ -1344,6 +1377,354 @@ _hashlib_hmac_digest_impl(PyObject *module, Py_buffer *key, Py_buffer *msg, return PyBytes_FromStringAndSize((const char*)md, md_len); } +/* OpenSSL-based HMAC implementation + */ + +static int _hmac_update(HMACobject*, PyObject*); + +/*[clinic input] +_hashlib.hmac_new + + key: Py_buffer + msg as msg_obj: object(c_default="NULL") = b'' + digestmod: str(c_default="NULL") = None + +Return a new hmac object. 
+[clinic start generated code]*/ + +static PyObject * +_hashlib_hmac_new_impl(PyObject *module, Py_buffer *key, PyObject *msg_obj, + const char *digestmod) +/*[clinic end generated code: output=9a35673be0cbea1b input=a0878868eb190134]*/ +{ + PyTypeObject *type = get_hashlib_state(module)->HMACtype; + const EVP_MD *digest; + HMAC_CTX *ctx = NULL; + HMACobject *self = NULL; + int r; + + if ((digestmod == NULL) || !strlen(digestmod)) { + PyErr_SetString( + PyExc_TypeError, "Missing required parameter 'digestmod'."); + return NULL; + } + + digest = py_digest_by_name(digestmod); + if (!digest) { + PyErr_SetString(PyExc_ValueError, "unknown hash function"); + return NULL; + } + + ctx = HMAC_CTX_new(); + if (ctx == NULL) { + _setException(PyExc_ValueError); + goto error; + } + + r = HMAC_Init_ex( + ctx, + (const char*)key->buf, + key->len, + digest, + NULL /*impl*/); + if (r == 0) { + _setException(PyExc_ValueError); + goto error; + } + + self = (HMACobject *)PyObject_New(HMACobject, type); + if (self == NULL) { + goto error; + } + + self->ctx = ctx; + self->lock = NULL; + + if ((msg_obj != NULL) && (msg_obj != Py_None)) { + if (!_hmac_update(self, msg_obj)) + goto error; + } + + return (PyObject*)self; + +error: + if (ctx) HMAC_CTX_free(ctx); + if (self) PyObject_Del(self); + return NULL; +} + +/* helper functions */ +static int +locked_HMAC_CTX_copy(HMAC_CTX *new_ctx_p, HMACobject *self) +{ + int result; + ENTER_HASHLIB(self); + result = HMAC_CTX_copy(new_ctx_p, self->ctx); + LEAVE_HASHLIB(self); + return result; +} + +static unsigned int +_hmac_digest_size(HMACobject *self) +{ + unsigned int digest_size = EVP_MD_size(HMAC_CTX_get_md(self->ctx)); + assert(digest_size <= EVP_MAX_MD_SIZE); + return digest_size; +} + +static int +_hmac_update(HMACobject *self, PyObject *obj) +{ + int r; + Py_buffer view = {0}; + + GET_BUFFER_VIEW_OR_ERROR(obj, &view, return 0); + + if (self->lock == NULL && view.len >= HASHLIB_GIL_MINSIZE) { + self->lock = PyThread_allocate_lock(); + /* 
fail? lock = NULL and we fail over to non-threaded code. */ + } + + if (self->lock != NULL) { + ENTER_HASHLIB(self); + r = HMAC_Update(self->ctx, (const unsigned char*)view.buf, view.len); + LEAVE_HASHLIB(self); + } else { + r = HMAC_Update(self->ctx, (const unsigned char*)view.buf, view.len); + } + + PyBuffer_Release(&view); + + if (r == 0) { + _setException(PyExc_ValueError); + return 0; + } + return 1; +} + +/*[clinic input] +_hashlib.HMAC.copy + +Return a copy ("clone") of the HMAC object. +[clinic start generated code]*/ + +static PyObject * +_hashlib_HMAC_copy_impl(HMACobject *self) +/*[clinic end generated code: output=29aa28b452833127 input=e2fa6a05db61a4d6]*/ +{ + HMACobject *retval; + + HMAC_CTX *ctx = HMAC_CTX_new(); + if (ctx == NULL) { + return _setException(PyExc_ValueError); + } + if (!locked_HMAC_CTX_copy(ctx, self)) { + HMAC_CTX_free(ctx); + return _setException(PyExc_ValueError); + } + + retval = (HMACobject *)PyObject_New(HMACobject, Py_TYPE(self)); + if (retval == NULL) { + HMAC_CTX_free(ctx); + return NULL; + } + retval->ctx = ctx; + retval->lock = NULL; + + return (PyObject *)retval; +} + +static void +_hmac_dealloc(HMACobject *self) +{ + PyTypeObject *tp = Py_TYPE(self); + if (self->lock != NULL) { + PyThread_free_lock(self->lock); + } + HMAC_CTX_free(self->ctx); + PyObject_Del(self); + Py_DECREF(tp); +} + +static PyObject * +_hmac_repr(HMACobject *self) +{ + PyObject *digest_name = py_digest_name(HMAC_CTX_get_md(self->ctx)); + if (digest_name == NULL) { + return NULL; + } + PyObject *repr = PyUnicode_FromFormat( + "<%U HMAC object @ %p>", digest_name, self + ); + Py_DECREF(digest_name); + return repr; +} + +/*[clinic input] +_hashlib.HMAC.update + msg: object + +Update the HMAC object with msg. 
+[clinic start generated code]*/ + +static PyObject * +_hashlib_HMAC_update_impl(HMACobject *self, PyObject *msg) +/*[clinic end generated code: output=f31f0ace8c625b00 input=1829173bb3cfd4e6]*/ +{ + if (!_hmac_update(self, msg)) { + return NULL; + } + Py_RETURN_NONE; +} + +static int +_hmac_digest(HMACobject *self, unsigned char *buf, unsigned int len) +{ + HMAC_CTX *temp_ctx = HMAC_CTX_new(); + if (temp_ctx == NULL) { + PyErr_NoMemory(); + return 0; + } + if (!locked_HMAC_CTX_copy(temp_ctx, self)) { + _setException(PyExc_ValueError); + return 0; + } + int r = HMAC_Final(temp_ctx, buf, &len); + HMAC_CTX_free(temp_ctx); + if (r == 0) { + _setException(PyExc_ValueError); + return 0; + } + return 1; +} + +/*[clinic input] +_hashlib.HMAC.digest +Return the digest of the bytes passed to the update() method so far. +[clinic start generated code]*/ + +static PyObject * +_hashlib_HMAC_digest_impl(HMACobject *self) +/*[clinic end generated code: output=1b1424355af7a41e input=bff07f74da318fb4]*/ +{ + unsigned char digest[EVP_MAX_MD_SIZE]; + unsigned int digest_size = _hmac_digest_size(self); + if (digest_size == 0) { + return _setException(PyExc_ValueError); + } + int r = _hmac_digest(self, digest, digest_size); + if (r == 0) { + return NULL; + } + return PyBytes_FromStringAndSize((const char *)digest, digest_size); +} + +/*[clinic input] +_hashlib.HMAC.hexdigest + +Return hexadecimal digest of the bytes passed to the update() method so far. + +This may be used to exchange the value safely in email or other non-binary +environments. 
+[clinic start generated code]*/ + +static PyObject * +_hashlib_HMAC_hexdigest_impl(HMACobject *self) +/*[clinic end generated code: output=80d825be1eaae6a7 input=5abc42702874ddcf]*/ +{ + unsigned char digest[EVP_MAX_MD_SIZE]; + unsigned int digest_size = _hmac_digest_size(self); + if (digest_size == 0) { + return _setException(PyExc_ValueError); + } + int r = _hmac_digest(self, digest, digest_size); + if (r == 0) { + return NULL; + } + return _Py_strhex((const char *)digest, digest_size); +} + +static PyObject * +_hashlib_hmac_get_digest_size(HMACobject *self, void *closure) +{ + unsigned int digest_size = _hmac_digest_size(self); + if (digest_size == 0) { + return _setException(PyExc_ValueError); + } + return PyLong_FromLong(digest_size); +} + +static PyObject * +_hashlib_hmac_get_block_size(HMACobject *self, void *closure) +{ + const EVP_MD *md = HMAC_CTX_get_md(self->ctx); + if (md == NULL) { + return _setException(PyExc_ValueError); + } + return PyLong_FromLong(EVP_MD_block_size(md)); +} + +static PyObject * +_hashlib_hmac_get_name(HMACobject *self, void *closure) +{ + PyObject *digest_name = py_digest_name(HMAC_CTX_get_md(self->ctx)); + if (digest_name == NULL) { + return NULL; + } + PyObject *name = PyUnicode_FromFormat("hmac-%U", digest_name); + Py_DECREF(digest_name); + return name; +} + +static PyMethodDef HMAC_methods[] = { + _HASHLIB_HMAC_UPDATE_METHODDEF + _HASHLIB_HMAC_DIGEST_METHODDEF + _HASHLIB_HMAC_HEXDIGEST_METHODDEF + _HASHLIB_HMAC_COPY_METHODDEF + {NULL, NULL} /* sentinel */ +}; + +static PyGetSetDef HMAC_getset[] = { + {"digest_size", (getter)_hashlib_hmac_get_digest_size, NULL, NULL, NULL}, + {"block_size", (getter)_hashlib_hmac_get_block_size, NULL, NULL, NULL}, + {"name", (getter)_hashlib_hmac_get_name, NULL, NULL, NULL}, + {NULL} /* Sentinel */ +}; + + +PyDoc_STRVAR(hmactype_doc, +"The object used to calculate HMAC of a message.\n\ +\n\ +Methods:\n\ +\n\ +update() -- updates the current digest with an additional string\n\ +digest() -- 
return the current digest value\n\ +hexdigest() -- return the current digest as a string of hexadecimal digits\n\ +copy() -- return a copy of the current hash object\n\ +\n\ +Attributes:\n\ +\n\ +name -- the name, including the hash algorithm used by this object\n\ +digest_size -- number of bytes in digest() output\n"); + +static PyType_Slot HMACtype_slots[] = { + {Py_tp_doc, (char *)hmactype_doc}, + {Py_tp_repr, (reprfunc)_hmac_repr}, + {Py_tp_dealloc,(destructor)_hmac_dealloc}, + {Py_tp_methods, HMAC_methods}, + {Py_tp_getset, HMAC_getset}, + {Py_tp_new, _disabled_new}, + {0, NULL} +}; + +PyType_Spec HMACtype_spec = { + "_hashlib.HMAC", /* name */ + sizeof(HMACobject), /* basicsize */ + .flags = Py_TPFLAGS_DEFAULT, + .slots = HMACtype_slots, +}; + + /* State for our callback function so that it can accumulate a result. */ typedef struct _internal_name_mapper_state { PyObject *set; @@ -1448,7 +1829,8 @@ static struct PyMethodDef EVP_functions[] = { PBKDF2_HMAC_METHODDEF _HASHLIB_SCRYPT_METHODDEF _HASHLIB_GET_FIPS_MODE_METHODDEF - _HASHLIB_HMAC_DIGEST_METHODDEF + _HASHLIB_HMAC_SINGLESHOT_METHODDEF + _HASHLIB_HMAC_NEW_METHODDEF _HASHLIB_OPENSSL_MD5_METHODDEF _HASHLIB_OPENSSL_SHA1_METHODDEF _HASHLIB_OPENSSL_SHA224_METHODDEF @@ -1472,6 +1854,7 @@ hashlib_traverse(PyObject *m, visitproc visit, void *arg) { _hashlibstate *state = get_hashlib_state(m); Py_VISIT(state->EVPtype); + Py_VISIT(state->HMACtype); #ifdef PY_OPENSSL_HAS_SHAKE Py_VISIT(state->EVPXOFtype); #endif @@ -1483,6 +1866,7 @@ hashlib_clear(PyObject *m) { _hashlibstate *state = get_hashlib_state(m); Py_CLEAR(state->EVPtype); + Py_CLEAR(state->HMACtype); #ifdef PY_OPENSSL_HAS_SHAKE Py_CLEAR(state->EVPXOFtype); #endif @@ -1541,10 +1925,18 @@ PyInit__hashlib(void) return NULL; } state->EVPtype = EVPtype; - Py_INCREF((PyObject *)state->EVPtype); PyModule_AddObject(m, "HASH", (PyObject *)state->EVPtype); + PyTypeObject *HMACtype = (PyTypeObject *)PyType_FromSpec(&HMACtype_spec); + if (HMACtype == NULL) { + 
Py_DECREF(m); + return NULL; + } + state->HMACtype = HMACtype; + Py_INCREF((PyObject *)state->HMACtype); + PyModule_AddObject(m, "HMAC", (PyObject *)state->HMACtype); + #ifdef PY_OPENSSL_HAS_SHAKE bases = PyTuple_Pack(1, (PyObject *)EVPtype); if (bases == NULL) { diff --git a/Modules/clinic/_hashopenssl.c.h b/Modules/clinic/_hashopenssl.c.h index 71c9246c95a93..8745fc7052ba1 100644 --- a/Modules/clinic/_hashopenssl.c.h +++ b/Modules/clinic/_hashopenssl.c.h @@ -1095,21 +1095,21 @@ _hashlib_scrypt(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj #endif /* (OPENSSL_VERSION_NUMBER > 0x10100000L && !defined(OPENSSL_NO_SCRYPT) && !defined(LIBRESSL_VERSION_NUMBER)) */ -PyDoc_STRVAR(_hashlib_hmac_digest__doc__, +PyDoc_STRVAR(_hashlib_hmac_singleshot__doc__, "hmac_digest($module, /, key, msg, digest)\n" "--\n" "\n" "Single-shot HMAC."); -#define _HASHLIB_HMAC_DIGEST_METHODDEF \ - {"hmac_digest", (PyCFunction)(void(*)(void))_hashlib_hmac_digest, METH_FASTCALL|METH_KEYWORDS, _hashlib_hmac_digest__doc__}, +#define _HASHLIB_HMAC_SINGLESHOT_METHODDEF \ + {"hmac_digest", (PyCFunction)(void(*)(void))_hashlib_hmac_singleshot, METH_FASTCALL|METH_KEYWORDS, _hashlib_hmac_singleshot__doc__}, static PyObject * -_hashlib_hmac_digest_impl(PyObject *module, Py_buffer *key, Py_buffer *msg, - const char *digest); +_hashlib_hmac_singleshot_impl(PyObject *module, Py_buffer *key, + Py_buffer *msg, const char *digest); static PyObject * -_hashlib_hmac_digest(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_hashlib_hmac_singleshot(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; static const char * const _keywords[] = {"key", "msg", "digest", NULL}; @@ -1150,7 +1150,7 @@ _hashlib_hmac_digest(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; } - return_value = _hashlib_hmac_digest_impl(module, &key, &msg, 
digest); + return_value = _hashlib_hmac_singleshot_impl(module, &key, &msg, digest); exit: /* Cleanup for key */ @@ -1165,6 +1165,165 @@ _hashlib_hmac_digest(PyObject *module, PyObject *const *args, Py_ssize_t nargs, return return_value; } +PyDoc_STRVAR(_hashlib_hmac_new__doc__, +"hmac_new($module, /, key, msg=b\'\', digestmod=None)\n" +"--\n" +"\n" +"Return a new hmac object."); + +#define _HASHLIB_HMAC_NEW_METHODDEF \ + {"hmac_new", (PyCFunction)(void(*)(void))_hashlib_hmac_new, METH_FASTCALL|METH_KEYWORDS, _hashlib_hmac_new__doc__}, + +static PyObject * +_hashlib_hmac_new_impl(PyObject *module, Py_buffer *key, PyObject *msg_obj, + const char *digestmod); + +static PyObject * +_hashlib_hmac_new(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + static const char * const _keywords[] = {"key", "msg", "digestmod", NULL}; + static _PyArg_Parser _parser = {NULL, _keywords, "hmac_new", 0}; + PyObject *argsbuf[3]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 1; + Py_buffer key = {NULL, NULL}; + PyObject *msg_obj = NULL; + const char *digestmod = NULL; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 3, 0, argsbuf); + if (!args) { + goto exit; + } + if (PyObject_GetBuffer(args[0], &key, PyBUF_SIMPLE) != 0) { + goto exit; + } + if (!PyBuffer_IsContiguous(&key, 'C')) { + _PyArg_BadArgument("hmac_new", "argument 'key'", "contiguous buffer", args[0]); + goto exit; + } + if (!noptargs) { + goto skip_optional_pos; + } + if (args[1]) { + msg_obj = args[1]; + if (!--noptargs) { + goto skip_optional_pos; + } + } + if (!PyUnicode_Check(args[2])) { + _PyArg_BadArgument("hmac_new", "argument 'digestmod'", "str", args[2]); + goto exit; + } + Py_ssize_t digestmod_length; + digestmod = PyUnicode_AsUTF8AndSize(args[2], &digestmod_length); + if (digestmod == NULL) { + goto exit; + } + if (strlen(digestmod) != (size_t)digestmod_length) { + PyErr_SetString(PyExc_ValueError, "embedded null character"); + goto exit; + } +skip_optional_pos: + return_value = _hashlib_hmac_new_impl(module, &key, msg_obj, digestmod); + +exit: + /* Cleanup for key */ + if (key.obj) { + PyBuffer_Release(&key); + } + + return return_value; +} + +PyDoc_STRVAR(_hashlib_HMAC_copy__doc__, +"copy($self, /)\n" +"--\n" +"\n" +"Return a copy (\"clone\") of the HMAC object."); + +#define _HASHLIB_HMAC_COPY_METHODDEF \ + {"copy", (PyCFunction)_hashlib_HMAC_copy, METH_NOARGS, _hashlib_HMAC_copy__doc__}, + +static PyObject * +_hashlib_HMAC_copy_impl(HMACobject *self); + +static PyObject * +_hashlib_HMAC_copy(HMACobject *self, PyObject *Py_UNUSED(ignored)) +{ + return _hashlib_HMAC_copy_impl(self); +} + +PyDoc_STRVAR(_hashlib_HMAC_update__doc__, +"update($self, /, msg)\n" +"--\n" +"\n" +"Update the HMAC object with msg."); + +#define _HASHLIB_HMAC_UPDATE_METHODDEF \ + {"update", (PyCFunction)(void(*)(void))_hashlib_HMAC_update, METH_FASTCALL|METH_KEYWORDS, _hashlib_HMAC_update__doc__}, + +static PyObject * 
+_hashlib_HMAC_update_impl(HMACobject *self, PyObject *msg); + +static PyObject * +_hashlib_HMAC_update(HMACobject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + static const char * const _keywords[] = {"msg", NULL}; + static _PyArg_Parser _parser = {NULL, _keywords, "update", 0}; + PyObject *argsbuf[1]; + PyObject *msg; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); + if (!args) { + goto exit; + } + msg = args[0]; + return_value = _hashlib_HMAC_update_impl(self, msg); + +exit: + return return_value; +} + +PyDoc_STRVAR(_hashlib_HMAC_digest__doc__, +"digest($self, /)\n" +"--\n" +"\n" +"Return the digest of the bytes passed to the update() method so far."); + +#define _HASHLIB_HMAC_DIGEST_METHODDEF \ + {"digest", (PyCFunction)_hashlib_HMAC_digest, METH_NOARGS, _hashlib_HMAC_digest__doc__}, + +static PyObject * +_hashlib_HMAC_digest_impl(HMACobject *self); + +static PyObject * +_hashlib_HMAC_digest(HMACobject *self, PyObject *Py_UNUSED(ignored)) +{ + return _hashlib_HMAC_digest_impl(self); +} + +PyDoc_STRVAR(_hashlib_HMAC_hexdigest__doc__, +"hexdigest($self, /)\n" +"--\n" +"\n" +"Return hexadecimal digest of the bytes passed to the update() method so far.\n" +"\n" +"This may be used to exchange the value safely in email or other non-binary\n" +"environments."); + +#define _HASHLIB_HMAC_HEXDIGEST_METHODDEF \ + {"hexdigest", (PyCFunction)_hashlib_HMAC_hexdigest, METH_NOARGS, _hashlib_HMAC_hexdigest__doc__}, + +static PyObject * +_hashlib_HMAC_hexdigest_impl(HMACobject *self); + +static PyObject * +_hashlib_HMAC_hexdigest(HMACobject *self, PyObject *Py_UNUSED(ignored)) +{ + return _hashlib_HMAC_hexdigest_impl(self); +} + #if !defined(LIBRESSL_VERSION_NUMBER) PyDoc_STRVAR(_hashlib_get_fips_mode__doc__, @@ -1243,4 +1402,4 @@ _hashlib_get_fips_mode(PyObject *module, PyObject *Py_UNUSED(ignored)) #ifndef _HASHLIB_GET_FIPS_MODE_METHODDEF #define 
_HASHLIB_GET_FIPS_MODE_METHODDEF #endif /* !defined(_HASHLIB_GET_FIPS_MODE_METHODDEF) */ -/*[clinic end generated code: output=a39bf0a766d7cdf7 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=972a198d2e8434bd input=a9049054013a1b77]*/ From webhook-mailer at python.org Sun May 17 12:32:55 2020 From: webhook-mailer at python.org (Christian Heimes) Date: Sun, 17 May 2020 16:32:55 -0000 Subject: [Python-checkins] bpo-37630: Fix spelling shake128 -> shake_128 (GH-20154) Message-ID: https://github.com/python/cpython/commit/62ecd8a8f908282726d2f019c93efa1cf2e9e784 commit: 62ecd8a8f908282726d2f019c93efa1cf2e9e784 branch: master author: Christian Heimes committer: GitHub date: 2020-05-17T18:32:38+02:00 summary: bpo-37630: Fix spelling shake128 -> shake_128 (GH-20154) files: M Modules/_hashopenssl.c M Modules/clinic/_hashopenssl.c.h diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c index 292e92a375268..36ad6a65d72cf 100644 --- a/Modules/_hashopenssl.c +++ b/Modules/_hashopenssl.c @@ -1062,39 +1062,39 @@ _hashlib_openssl_sha3_512_impl(PyObject *module, PyObject *data_obj, #ifdef PY_OPENSSL_HAS_SHAKE /*[clinic input] -_hashlib.openssl_shake128 +_hashlib.openssl_shake_128 string as data_obj: object(py_default="b''") = NULL * usedforsecurity: bool = True -Returns a shake128 variable hash object; optionally initialized with a string +Returns a shake-128 variable hash object; optionally initialized with a string [clinic start generated code]*/ static PyObject * -_hashlib_openssl_shake128_impl(PyObject *module, PyObject *data_obj, - int usedforsecurity) -/*[clinic end generated code: output=c68a0e30b4c09e1a input=b6d1e9566bacbb64]*/ +_hashlib_openssl_shake_128_impl(PyObject *module, PyObject *data_obj, + int usedforsecurity) +/*[clinic end generated code: output=bc49cdd8ada1fa97 input=6c9d67440eb33ec8]*/ { return EVP_fast_new(module, data_obj, EVP_shake128(), usedforsecurity); } /*[clinic input] -_hashlib.openssl_shake256 +_hashlib.openssl_shake_256 
string as data_obj: object(py_default="b''") = NULL * usedforsecurity: bool = True -Returns a shake256 variable hash object; optionally initialized with a string +Returns a shake-256 variable hash object; optionally initialized with a string [clinic start generated code]*/ static PyObject * -_hashlib_openssl_shake256_impl(PyObject *module, PyObject *data_obj, - int usedforsecurity) -/*[clinic end generated code: output=d56387762dcad516 input=591b9b78c0498116]*/ +_hashlib_openssl_shake_256_impl(PyObject *module, PyObject *data_obj, + int usedforsecurity) +/*[clinic end generated code: output=358d213be8852df7 input=479cbe9fefd4a9f8]*/ { return EVP_fast_new(module, data_obj, EVP_shake256(), usedforsecurity); } @@ -1841,8 +1841,8 @@ static struct PyMethodDef EVP_functions[] = { _HASHLIB_OPENSSL_SHA3_256_METHODDEF _HASHLIB_OPENSSL_SHA3_384_METHODDEF _HASHLIB_OPENSSL_SHA3_512_METHODDEF - _HASHLIB_OPENSSL_SHAKE128_METHODDEF - _HASHLIB_OPENSSL_SHAKE256_METHODDEF + _HASHLIB_OPENSSL_SHAKE_128_METHODDEF + _HASHLIB_OPENSSL_SHAKE_256_METHODDEF {NULL, NULL} /* Sentinel */ }; diff --git a/Modules/clinic/_hashopenssl.c.h b/Modules/clinic/_hashopenssl.c.h index 8745fc7052ba1..5ab4e996bf96b 100644 --- a/Modules/clinic/_hashopenssl.c.h +++ b/Modules/clinic/_hashopenssl.c.h @@ -766,25 +766,25 @@ _hashlib_openssl_sha3_512(PyObject *module, PyObject *const *args, Py_ssize_t na #if defined(PY_OPENSSL_HAS_SHAKE) -PyDoc_STRVAR(_hashlib_openssl_shake128__doc__, -"openssl_shake128($module, /, string=b\'\', *, usedforsecurity=True)\n" +PyDoc_STRVAR(_hashlib_openssl_shake_128__doc__, +"openssl_shake_128($module, /, string=b\'\', *, usedforsecurity=True)\n" "--\n" "\n" -"Returns a shake128 variable hash object; optionally initialized with a string"); +"Returns a shake-128 variable hash object; optionally initialized with a string"); -#define _HASHLIB_OPENSSL_SHAKE128_METHODDEF \ - {"openssl_shake128", (PyCFunction)(void(*)(void))_hashlib_openssl_shake128, METH_FASTCALL|METH_KEYWORDS, 
_hashlib_openssl_shake128__doc__}, +#define _HASHLIB_OPENSSL_SHAKE_128_METHODDEF \ + {"openssl_shake_128", (PyCFunction)(void(*)(void))_hashlib_openssl_shake_128, METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_shake_128__doc__}, static PyObject * -_hashlib_openssl_shake128_impl(PyObject *module, PyObject *data_obj, - int usedforsecurity); +_hashlib_openssl_shake_128_impl(PyObject *module, PyObject *data_obj, + int usedforsecurity); static PyObject * -_hashlib_openssl_shake128(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_hashlib_openssl_shake_128(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; static const char * const _keywords[] = {"string", "usedforsecurity", NULL}; - static _PyArg_Parser _parser = {NULL, _keywords, "openssl_shake128", 0}; + static _PyArg_Parser _parser = {NULL, _keywords, "openssl_shake_128", 0}; PyObject *argsbuf[2]; Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0; PyObject *data_obj = NULL; @@ -812,7 +812,7 @@ _hashlib_openssl_shake128(PyObject *module, PyObject *const *args, Py_ssize_t na goto exit; } skip_optional_kwonly: - return_value = _hashlib_openssl_shake128_impl(module, data_obj, usedforsecurity); + return_value = _hashlib_openssl_shake_128_impl(module, data_obj, usedforsecurity); exit: return return_value; @@ -822,25 +822,25 @@ _hashlib_openssl_shake128(PyObject *module, PyObject *const *args, Py_ssize_t na #if defined(PY_OPENSSL_HAS_SHAKE) -PyDoc_STRVAR(_hashlib_openssl_shake256__doc__, -"openssl_shake256($module, /, string=b\'\', *, usedforsecurity=True)\n" +PyDoc_STRVAR(_hashlib_openssl_shake_256__doc__, +"openssl_shake_256($module, /, string=b\'\', *, usedforsecurity=True)\n" "--\n" "\n" -"Returns a shake256 variable hash object; optionally initialized with a string"); +"Returns a shake-256 variable hash object; optionally initialized with a string"); -#define _HASHLIB_OPENSSL_SHAKE256_METHODDEF \ - 
{"openssl_shake256", (PyCFunction)(void(*)(void))_hashlib_openssl_shake256, METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_shake256__doc__}, +#define _HASHLIB_OPENSSL_SHAKE_256_METHODDEF \ + {"openssl_shake_256", (PyCFunction)(void(*)(void))_hashlib_openssl_shake_256, METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_shake_256__doc__}, static PyObject * -_hashlib_openssl_shake256_impl(PyObject *module, PyObject *data_obj, - int usedforsecurity); +_hashlib_openssl_shake_256_impl(PyObject *module, PyObject *data_obj, + int usedforsecurity); static PyObject * -_hashlib_openssl_shake256(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_hashlib_openssl_shake_256(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; static const char * const _keywords[] = {"string", "usedforsecurity", NULL}; - static _PyArg_Parser _parser = {NULL, _keywords, "openssl_shake256", 0}; + static _PyArg_Parser _parser = {NULL, _keywords, "openssl_shake_256", 0}; PyObject *argsbuf[2]; Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 0; PyObject *data_obj = NULL; @@ -868,7 +868,7 @@ _hashlib_openssl_shake256(PyObject *module, PyObject *const *args, Py_ssize_t na goto exit; } skip_optional_kwonly: - return_value = _hashlib_openssl_shake256_impl(module, data_obj, usedforsecurity); + return_value = _hashlib_openssl_shake_256_impl(module, data_obj, usedforsecurity); exit: return return_value; @@ -1387,13 +1387,13 @@ _hashlib_get_fips_mode(PyObject *module, PyObject *Py_UNUSED(ignored)) #define _HASHLIB_OPENSSL_SHA3_512_METHODDEF #endif /* !defined(_HASHLIB_OPENSSL_SHA3_512_METHODDEF) */ -#ifndef _HASHLIB_OPENSSL_SHAKE128_METHODDEF - #define _HASHLIB_OPENSSL_SHAKE128_METHODDEF -#endif /* !defined(_HASHLIB_OPENSSL_SHAKE128_METHODDEF) */ +#ifndef _HASHLIB_OPENSSL_SHAKE_128_METHODDEF + #define _HASHLIB_OPENSSL_SHAKE_128_METHODDEF +#endif /* !defined(_HASHLIB_OPENSSL_SHAKE_128_METHODDEF) */ -#ifndef _HASHLIB_OPENSSL_SHAKE256_METHODDEF - #define _HASHLIB_OPENSSL_SHAKE256_METHODDEF -#endif /* !defined(_HASHLIB_OPENSSL_SHAKE256_METHODDEF) */ +#ifndef _HASHLIB_OPENSSL_SHAKE_256_METHODDEF + #define _HASHLIB_OPENSSL_SHAKE_256_METHODDEF +#endif /* !defined(_HASHLIB_OPENSSL_SHAKE_256_METHODDEF) */ #ifndef _HASHLIB_SCRYPT_METHODDEF #define _HASHLIB_SCRYPT_METHODDEF @@ -1402,4 +1402,4 @@ _hashlib_get_fips_mode(PyObject *module, PyObject *Py_UNUSED(ignored)) #ifndef _HASHLIB_GET_FIPS_MODE_METHODDEF #define _HASHLIB_GET_FIPS_MODE_METHODDEF #endif /* !defined(_HASHLIB_GET_FIPS_MODE_METHODDEF) */ -/*[clinic end generated code: output=972a198d2e8434bd input=a9049054013a1b77]*/ +/*[clinic end generated code: output=a0bff5dcef88de6a input=a9049054013a1b77]*/ From webhook-mailer at python.org Sun May 17 17:25:07 2020 From: webhook-mailer at python.org (Mathieu Dupuy) Date: Sun, 17 May 2020 21:25:07 -0000 Subject: [Python-checkins] Doc: move a dot outside double quotes (GH-20007) Message-ID: https://github.com/python/cpython/commit/4eba67783eb2084b2dad875ed1cbffdaf8a9202e commit: 
4eba67783eb2084b2dad875ed1cbffdaf8a9202e branch: master author: Mathieu Dupuy committer: GitHub date: 2020-05-17T23:24:59+02:00 summary: Doc: move a dot outside double quotes (GH-20007) files: M Doc/library/datetime.rst diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst index 4daf5df0efb11..800361c54ba71 100644 --- a/Doc/library/datetime.rst +++ b/Doc/library/datetime.rst @@ -35,7 +35,7 @@ on efficient attribute extraction for output formatting and manipulation. Aware and Naive Objects ----------------------- -Date and time objects may be categorized as "aware" or "naive." +Date and time objects may be categorized as "aware" or "naive". With sufficient knowledge of applicable algorithmic and political time adjustments, such as time zone and daylight saving time information, From webhook-mailer at python.org Sun May 17 17:29:55 2020 From: webhook-mailer at python.org (Mathieu Dupuy) Date: Sun, 17 May 2020 21:29:55 -0000 Subject: [Python-checkins] Doc: change 'Posix' for 'POSIX' (GH-20001) Message-ID: https://github.com/python/cpython/commit/65460565df99fbda6a74b6bb4bf99affaaf8bd95 commit: 65460565df99fbda6a74b6bb4bf99affaaf8bd95 branch: master author: Mathieu Dupuy committer: GitHub date: 2020-05-17T23:29:51+02:00 summary: Doc: change 'Posix' for 'POSIX' (GH-20001) files: M Doc/howto/sockets.rst M Doc/library/subprocess.rst M Doc/library/sysconfig.rst diff --git a/Doc/howto/sockets.rst b/Doc/howto/sockets.rst index 4655f28060896..d6ed128e073fd 100644 --- a/Doc/howto/sockets.rst +++ b/Doc/howto/sockets.rst @@ -319,7 +319,7 @@ inside-out. In Python, you use ``socket.setblocking(False)`` to make it non-blocking. In C, it's more complex, (for one thing, you'll need to choose between the BSD flavor -``O_NONBLOCK`` and the almost indistinguishable Posix flavor ``O_NDELAY``, which +``O_NONBLOCK`` and the almost indistinguishable POSIX flavor ``O_NDELAY``, which is completely different from ``TCP_NODELAY``), but it's the exact same idea. 
You do this after creating the socket, but before using it. (Actually, if you're nuts, you can switch back and forth.) diff --git a/Doc/library/subprocess.rst b/Doc/library/subprocess.rst index e5dbfe42809fa..5988bd35e72b1 100644 --- a/Doc/library/subprocess.rst +++ b/Doc/library/subprocess.rst @@ -791,14 +791,14 @@ Instances of the :class:`Popen` class have the following methods: .. method:: Popen.terminate() - Stop the child. On Posix OSs the method sends SIGTERM to the + Stop the child. On POSIX OSs the method sends SIGTERM to the child. On Windows the Win32 API function :c:func:`TerminateProcess` is called to stop the child. .. method:: Popen.kill() - Kills the child. On Posix OSs the function sends SIGKILL to the child. + Kills the child. On POSIX OSs the function sends SIGKILL to the child. On Windows :meth:`kill` is an alias for :meth:`terminate`. diff --git a/Doc/library/sysconfig.rst b/Doc/library/sysconfig.rst index b5a1da80c686d..78a1dfce9ae05 100644 --- a/Doc/library/sysconfig.rst +++ b/Doc/library/sysconfig.rst @@ -74,12 +74,12 @@ places. Python currently supports seven schemes: -- *posix_prefix*: scheme for Posix platforms like Linux or Mac OS X. This is +- *posix_prefix*: scheme for POSIX platforms like Linux or Mac OS X. This is the default scheme used when Python or a component is installed. -- *posix_home*: scheme for Posix platforms used when a *home* option is used +- *posix_home*: scheme for POSIX platforms used when a *home* option is used upon installation. This scheme is used when a component is installed through Distutils with a specific home prefix. -- *posix_user*: scheme for Posix platforms used when a component is installed +- *posix_user*: scheme for POSIX platforms used when a component is installed through Distutils and the *user* option is used. This scheme defines paths located under the user home directory. - *nt*: scheme for NT platforms like Windows. 
From webhook-mailer at python.org Sun May 17 17:32:48 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sun, 17 May 2020 21:32:48 -0000 Subject: [Python-checkins] Doc: move a dot outside double quotes (GH-20007) Message-ID: https://github.com/python/cpython/commit/b1e23f3c321facdbcc1ffe6e2914d21038a12b36 commit: b1e23f3c321facdbcc1ffe6e2914d21038a12b36 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-17T14:32:43-07:00 summary: Doc: move a dot outside double quotes (GH-20007) (cherry picked from commit 4eba67783eb2084b2dad875ed1cbffdaf8a9202e) Co-authored-by: Mathieu Dupuy files: M Doc/library/datetime.rst diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst index 29c6b5a4736c0..577cc0e1c1f4b 100644 --- a/Doc/library/datetime.rst +++ b/Doc/library/datetime.rst @@ -35,7 +35,7 @@ on efficient attribute extraction for output formatting and manipulation. Aware and Naive Objects ----------------------- -Date and time objects may be categorized as "aware" or "naive." +Date and time objects may be categorized as "aware" or "naive". 
With sufficient knowledge of applicable algorithmic and political time adjustments, such as time zone and daylight saving time information, From webhook-mailer at python.org Sun May 17 17:35:28 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sun, 17 May 2020 21:35:28 -0000 Subject: [Python-checkins] Doc: change 'Posix' for 'POSIX' (GH-20001) Message-ID: https://github.com/python/cpython/commit/205b554570d3dc5fb102b1cfc515922ffd842e43 commit: 205b554570d3dc5fb102b1cfc515922ffd842e43 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-17T14:35:22-07:00 summary: Doc: change 'Posix' for 'POSIX' (GH-20001) (cherry picked from commit 65460565df99fbda6a74b6bb4bf99affaaf8bd95) Co-authored-by: Mathieu Dupuy files: M Doc/howto/sockets.rst M Doc/library/subprocess.rst M Doc/library/sysconfig.rst diff --git a/Doc/howto/sockets.rst b/Doc/howto/sockets.rst index bc71d85a83e92..b5c2152ec7004 100644 --- a/Doc/howto/sockets.rst +++ b/Doc/howto/sockets.rst @@ -319,7 +319,7 @@ inside-out. In Python, you use ``socket.setblocking(0)`` to make it non-blocking. In C, it's more complex, (for one thing, you'll need to choose between the BSD flavor -``O_NONBLOCK`` and the almost indistinguishable Posix flavor ``O_NDELAY``, which +``O_NONBLOCK`` and the almost indistinguishable POSIX flavor ``O_NDELAY``, which is completely different from ``TCP_NODELAY``), but it's the exact same idea. You do this after creating the socket, but before using it. (Actually, if you're nuts, you can switch back and forth.) diff --git a/Doc/library/subprocess.rst b/Doc/library/subprocess.rst index 69737820f37d8..9b5b4565c76ab 100644 --- a/Doc/library/subprocess.rst +++ b/Doc/library/subprocess.rst @@ -711,14 +711,14 @@ Instances of the :class:`Popen` class have the following methods: .. method:: Popen.terminate() - Stop the child. On Posix OSs the method sends SIGTERM to the + Stop the child. 
On POSIX OSs the method sends SIGTERM to the child. On Windows the Win32 API function :c:func:`TerminateProcess` is called to stop the child. .. method:: Popen.kill() - Kills the child. On Posix OSs the function sends SIGKILL to the child. + Kills the child. On POSIX OSs the function sends SIGKILL to the child. On Windows :meth:`kill` is an alias for :meth:`terminate`. diff --git a/Doc/library/sysconfig.rst b/Doc/library/sysconfig.rst index b5a1da80c686d..78a1dfce9ae05 100644 --- a/Doc/library/sysconfig.rst +++ b/Doc/library/sysconfig.rst @@ -74,12 +74,12 @@ places. Python currently supports seven schemes: -- *posix_prefix*: scheme for Posix platforms like Linux or Mac OS X. This is +- *posix_prefix*: scheme for POSIX platforms like Linux or Mac OS X. This is the default scheme used when Python or a component is installed. -- *posix_home*: scheme for Posix platforms used when a *home* option is used +- *posix_home*: scheme for POSIX platforms used when a *home* option is used upon installation. This scheme is used when a component is installed through Distutils with a specific home prefix. -- *posix_user*: scheme for Posix platforms used when a component is installed +- *posix_user*: scheme for POSIX platforms used when a component is installed through Distutils and the *user* option is used. This scheme defines paths located under the user home directory. - *nt*: scheme for NT platforms like Windows. 
From webhook-mailer at python.org Sun May 17 17:37:35 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sun, 17 May 2020 21:37:35 -0000 Subject: [Python-checkins] Doc: change 'Posix' for 'POSIX' (GH-20001) Message-ID: https://github.com/python/cpython/commit/7a3522d478d456b38ef5e647c21595904bea79df commit: 7a3522d478d456b38ef5e647c21595904bea79df branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-17T14:37:30-07:00 summary: Doc: change 'Posix' for 'POSIX' (GH-20001) (cherry picked from commit 65460565df99fbda6a74b6bb4bf99affaaf8bd95) Co-authored-by: Mathieu Dupuy files: M Doc/howto/sockets.rst M Doc/library/subprocess.rst M Doc/library/sysconfig.rst diff --git a/Doc/howto/sockets.rst b/Doc/howto/sockets.rst index bc71d85a83e92..b5c2152ec7004 100644 --- a/Doc/howto/sockets.rst +++ b/Doc/howto/sockets.rst @@ -319,7 +319,7 @@ inside-out. In Python, you use ``socket.setblocking(0)`` to make it non-blocking. In C, it's more complex, (for one thing, you'll need to choose between the BSD flavor -``O_NONBLOCK`` and the almost indistinguishable Posix flavor ``O_NDELAY``, which +``O_NONBLOCK`` and the almost indistinguishable POSIX flavor ``O_NDELAY``, which is completely different from ``TCP_NODELAY``), but it's the exact same idea. You do this after creating the socket, but before using it. (Actually, if you're nuts, you can switch back and forth.) diff --git a/Doc/library/subprocess.rst b/Doc/library/subprocess.rst index cce7da1c9b166..0b692b4dec6c9 100644 --- a/Doc/library/subprocess.rst +++ b/Doc/library/subprocess.rst @@ -755,14 +755,14 @@ Instances of the :class:`Popen` class have the following methods: .. method:: Popen.terminate() - Stop the child. On Posix OSs the method sends SIGTERM to the + Stop the child. On POSIX OSs the method sends SIGTERM to the child. On Windows the Win32 API function :c:func:`TerminateProcess` is called to stop the child. .. 
method:: Popen.kill() - Kills the child. On Posix OSs the function sends SIGKILL to the child. + Kills the child. On POSIX OSs the function sends SIGKILL to the child. On Windows :meth:`kill` is an alias for :meth:`terminate`. diff --git a/Doc/library/sysconfig.rst b/Doc/library/sysconfig.rst index b5a1da80c686d..78a1dfce9ae05 100644 --- a/Doc/library/sysconfig.rst +++ b/Doc/library/sysconfig.rst @@ -74,12 +74,12 @@ places. Python currently supports seven schemes: -- *posix_prefix*: scheme for Posix platforms like Linux or Mac OS X. This is +- *posix_prefix*: scheme for POSIX platforms like Linux or Mac OS X. This is the default scheme used when Python or a component is installed. -- *posix_home*: scheme for Posix platforms used when a *home* option is used +- *posix_home*: scheme for POSIX platforms used when a *home* option is used upon installation. This scheme is used when a component is installed through Distutils with a specific home prefix. -- *posix_user*: scheme for Posix platforms used when a component is installed +- *posix_user*: scheme for POSIX platforms used when a component is installed through Distutils and the *user* option is used. This scheme defines paths located under the user home directory. - *nt*: scheme for NT platforms like Windows. 
From webhook-mailer at python.org Sun May 17 20:57:50 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 18 May 2020 00:57:50 -0000 Subject: [Python-checkins] bpo-40597: email: Use CTE if lines are longer than max_line_length consistently (gh-20038) (gh-20084) Message-ID: https://github.com/python/cpython/commit/c1f1ddf30a595c2bfa3c06e54fb03fa212cd28b5 commit: c1f1ddf30a595c2bfa3c06e54fb03fa212cd28b5 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-17T20:57:42-04:00 summary: bpo-40597: email: Use CTE if lines are longer than max_line_length consistently (gh-20038) (gh-20084) raw_data_manager (default for EmailPolicy, EmailMessage) does correct wrapping of 'text' parts as long as the message contains characters outside of 7bit US-ASCII set: base64 or qp Content-Transfer-Encoding is applied if the lines would be too long without it. It did not, however, do this for ascii-only text, which could result in lines that were longer than policy.max_line_length or even the rfc 998 maximum. This changeset fixes the heuristic so that if lines are longer than policy.max_line_length, it will always apply a content-transfer-encoding so that the lines are wrapped correctly. (cherry picked from commit 6f2f475d5a2cd7675dce844f3af436ba919ef92b) Co-authored-by: Arkadiusz Hiler files: A Misc/NEWS.d/next/Library/2020-05-11-19-17-23.bpo-40597.4SGfgm.rst M Lib/email/contentmanager.py M Lib/test/test_email/test_contentmanager.py diff --git a/Lib/email/contentmanager.py b/Lib/email/contentmanager.py index b904ded94c92e..2b4b8757f46f6 100644 --- a/Lib/email/contentmanager.py +++ b/Lib/email/contentmanager.py @@ -146,13 +146,13 @@ def embedded_body(lines): return linesep.join(lines) + linesep def normal_body(lines): return b'\n'.join(lines) + b'\n' if cte==None: # Use heuristics to decide on the "best" encoding. 
- try: - return '7bit', normal_body(lines).decode('ascii') - except UnicodeDecodeError: - pass - if (policy.cte_type == '8bit' and - max(len(x) for x in lines) <= policy.max_line_length): - return '8bit', normal_body(lines).decode('ascii', 'surrogateescape') + if max(len(x) for x in lines) <= policy.max_line_length: + try: + return '7bit', normal_body(lines).decode('ascii') + except UnicodeDecodeError: + pass + if policy.cte_type == '8bit': + return '8bit', normal_body(lines).decode('ascii', 'surrogateescape') sniff = embedded_body(lines[:10]) sniff_qp = quoprimime.body_encode(sniff.decode('latin-1'), policy.max_line_length) diff --git a/Lib/test/test_email/test_contentmanager.py b/Lib/test/test_email/test_contentmanager.py index 169058eac83da..64dca2d017e62 100644 --- a/Lib/test/test_email/test_contentmanager.py +++ b/Lib/test/test_email/test_contentmanager.py @@ -329,6 +329,21 @@ def test_set_text_charset_latin_1(self): self.assertEqual(m.get_payload(decode=True).decode('utf-8'), content) self.assertEqual(m.get_content(), content) + def test_set_text_plain_long_line_heuristics(self): + m = self._make_message() + content = ("Simple but long message that is over 78 characters" + " long to force transfer encoding.\n") + raw_data_manager.set_content(m, content) + self.assertEqual(str(m), textwrap.dedent("""\ + Content-Type: text/plain; charset="utf-8" + Content-Transfer-Encoding: quoted-printable + + Simple but long message that is over 78 characters long to = + force transfer encoding. + """)) + self.assertEqual(m.get_payload(decode=True).decode('utf-8'), content) + self.assertEqual(m.get_content(), content) + def test_set_text_short_line_minimal_non_ascii_heuristics(self): m = self._make_message() content = "et l? il est mont? sur moi et il commence ? 
m'?to.\n" diff --git a/Misc/NEWS.d/next/Library/2020-05-11-19-17-23.bpo-40597.4SGfgm.rst b/Misc/NEWS.d/next/Library/2020-05-11-19-17-23.bpo-40597.4SGfgm.rst new file mode 100644 index 0000000000000..1b9fe609c25b7 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-11-19-17-23.bpo-40597.4SGfgm.rst @@ -0,0 +1 @@ +If text content lines are longer than policy.max_line_length, always use a content-encoding to make sure they are wrapped. From webhook-mailer at python.org Sun May 17 21:32:42 2020 From: webhook-mailer at python.org (Rahul Kumaresan) Date: Mon, 18 May 2020 01:32:42 -0000 Subject: [Python-checkins] bpo-39705 : sorted() tutorial example under looping techniques improved (GH-18999) Message-ID: https://github.com/python/cpython/commit/eefd4e033334a2a1d3929d0f7978469e5b5c4e56 commit: eefd4e033334a2a1d3929d0f7978469e5b5c4e56 branch: master author: Rahul Kumaresan committer: GitHub date: 2020-05-17T18:32:34-07:00 summary: bpo-39705 : sorted() tutorial example under looping techniques improved (GH-18999) files: A Misc/NEWS.d/next/Documentation/2020-03-14-18-37-06.bpo-39705.nQVqig.rst M Doc/tutorial/datastructures.rst diff --git a/Doc/tutorial/datastructures.rst b/Doc/tutorial/datastructures.rst index 0edb73ad73691..ff4c797f66cd6 100644 --- a/Doc/tutorial/datastructures.rst +++ b/Doc/tutorial/datastructures.rst @@ -613,6 +613,21 @@ direction and then call the :func:`reversed` function. :: To loop over a sequence in sorted order, use the :func:`sorted` function which returns a new sorted list while leaving the source unaltered. :: + >>> basket = ['apple', 'orange', 'apple', 'pear', 'orange', 'banana'] + >>> for i in sorted(basket): + ... print(i) + ... + apple + apple + banana + orange + orange + pear + +Using :func:`set` on a sequence eliminates duplicate elements. The use of +:func:`sorted` in combination with :func:`set` over a sequence is an idiomatic +way to loop over unique elements of the sequence in sorted order. 
:: + >>> basket = ['apple', 'orange', 'apple', 'pear', 'orange', 'banana'] >>> for f in sorted(set(basket)): ... print(f) diff --git a/Misc/NEWS.d/next/Documentation/2020-03-14-18-37-06.bpo-39705.nQVqig.rst b/Misc/NEWS.d/next/Documentation/2020-03-14-18-37-06.bpo-39705.nQVqig.rst new file mode 100644 index 0000000000000..3454b928e70b4 --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2020-03-14-18-37-06.bpo-39705.nQVqig.rst @@ -0,0 +1,2 @@ +Tutorial example for sorted() in the Loop Techniques section is given a better explanation. +Also a new example is included to explain sorted()'s basic behavior. \ No newline at end of file From webhook-mailer at python.org Sun May 17 21:53:10 2020 From: webhook-mailer at python.org (Raymond Hettinger) Date: Mon, 18 May 2020 01:53:10 -0000 Subject: [Python-checkins] bpo-39058: Preserve attribute order in argparse Namespace reprs. (GH-17621) Message-ID: https://github.com/python/cpython/commit/9681953c99b686cf23d1c476a2b26d2ddbec7694 commit: 9681953c99b686cf23d1c476a2b26d2ddbec7694 branch: master author: Raymond Hettinger committer: GitHub date: 2020-05-17T18:53:01-07:00 summary: bpo-39058: Preserve attribute order in argparse Namespace reprs. 
(GH-17621) files: A Misc/NEWS.d/next/Library/2019-12-15-19-17-10.bpo-39058.7ci-vd.rst M Lib/argparse.py M Lib/test/test_argparse.py diff --git a/Lib/argparse.py b/Lib/argparse.py index 9c710cef5b6aa..2677ef63e9e54 100644 --- a/Lib/argparse.py +++ b/Lib/argparse.py @@ -129,7 +129,7 @@ def __repr__(self): return '%s(%s)' % (type_name, ', '.join(arg_strings)) def _get_kwargs(self): - return sorted(self.__dict__.items()) + return list(self.__dict__.items()) def _get_args(self): return [] diff --git a/Lib/test/test_argparse.py b/Lib/test/test_argparse.py index 9899a53d6d197..e82a0c39c21a8 100644 --- a/Lib/test/test_argparse.py +++ b/Lib/test/test_argparse.py @@ -4725,7 +4725,7 @@ def test_argument(self): def test_namespace(self): ns = argparse.Namespace(foo=42, bar='spam') - string = "Namespace(bar='spam', foo=42)" + string = "Namespace(foo=42, bar='spam')" self.assertStringEqual(ns, string) def test_namespace_starkwargs_notidentifier(self): diff --git a/Misc/NEWS.d/next/Library/2019-12-15-19-17-10.bpo-39058.7ci-vd.rst b/Misc/NEWS.d/next/Library/2019-12-15-19-17-10.bpo-39058.7ci-vd.rst new file mode 100644 index 0000000000000..fff13223bc4cd --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-12-15-19-17-10.bpo-39058.7ci-vd.rst @@ -0,0 +1,4 @@ +In the argparse module, the repr for Namespace() and other argument holders +now displayed in the order attributes were added. Formerly, it displayed in +alphabetical order even though argument order is preserved the user visible +parts of the module. 
From webhook-mailer at python.org Sun May 17 21:55:16 2020 From: webhook-mailer at python.org (Paul Ganssle) Date: Mon, 18 May 2020 01:55:16 -0000 Subject: [Python-checkins] bpo-40536: Add zoneinfo.available_timezones (GH-20158) Message-ID: https://github.com/python/cpython/commit/e527ec8abe0849e784ce100f53c2736986b670ae commit: e527ec8abe0849e784ce100f53c2736986b670ae branch: master author: Paul Ganssle committer: GitHub date: 2020-05-17T21:55:11-04:00 summary: bpo-40536: Add zoneinfo.available_timezones (GH-20158) This was not specified in the PEP, but it will likely be a frequently requested feature if it's not included. This includes only the "canonical" zones, not a simple listing of every valid value of `key` that can be passed to `Zoneinfo`, because it seems likely that that's what people will want. files: A Misc/NEWS.d/next/Library/2020-05-17-14-00-12.bpo-40536.FCpoRA.rst M Doc/library/zoneinfo.rst M Lib/test/test_zoneinfo/_support.py M Lib/test/test_zoneinfo/test_zoneinfo.py M Lib/zoneinfo/__init__.py M Lib/zoneinfo/_tzpath.py diff --git a/Doc/library/zoneinfo.rst b/Doc/library/zoneinfo.rst index d8e2796c7f631..1b6f2e7bd15f0 100644 --- a/Doc/library/zoneinfo.rst +++ b/Doc/library/zoneinfo.rst @@ -337,6 +337,29 @@ pickled in an environment with a different version of the time zone data. Functions --------- +.. function:: available_timezones() + + Get a set containing all the valid keys for IANA time zones available + anywhere on the time zone path. This is recalculated on every call to the + function. + + This function only includes canonical zone names and does not include + "special" zones such as those under the ``posix/`` and ``right/`` + directories, or the ``posixrules`` zone. + + .. caution:: + + This function may open a large number of files, as the best way to + determine if a file on the time zone path is a valid time zone is to + read the "magic string" at the beginning. + + .. 
note:: + + These values are not designed to be exposed to end-users; for user + facing elements, applications should use something like CLDR (the + Unicode Common Locale Data Repository) to get more user-friendly + strings. See also the cautionary note on :attr:`ZoneInfo.key`. + .. function:: reset_tzpath(to=None) Sets or resets the time zone search path (:data:`TZPATH`) for the module. diff --git a/Lib/test/test_zoneinfo/_support.py b/Lib/test/test_zoneinfo/_support.py index 6bd8d8dc0fbfe..0fe162c258368 100644 --- a/Lib/test/test_zoneinfo/_support.py +++ b/Lib/test/test_zoneinfo/_support.py @@ -66,11 +66,35 @@ def setUpClass(cls): super().setUpClass() @contextlib.contextmanager - def tzpath_context(self, tzpath, lock=TZPATH_LOCK): + def tzpath_context(self, tzpath, block_tzdata=True, lock=TZPATH_LOCK): + def pop_tzdata_modules(): + tzdata_modules = {} + for modname in list(sys.modules): + if modname.split(".", 1)[0] != "tzdata": # pragma: nocover + continue + + tzdata_modules[modname] = sys.modules.pop(modname) + + return tzdata_modules + with lock: + if block_tzdata: + # In order to fully exclude tzdata from the path, we need to + # clear the sys.modules cache of all its contents ? setting the + # root package to None is not enough to block direct access of + # already-imported submodules (though it will prevent new + # imports of submodules). 
+ tzdata_modules = pop_tzdata_modules() + sys.modules["tzdata"] = None + old_path = self.module.TZPATH try: self.module.reset_tzpath(tzpath) yield finally: + if block_tzdata: + sys.modules.pop("tzdata") + for modname, module in tzdata_modules.items(): + sys.modules[modname] = module + self.module.reset_tzpath(old_path) diff --git a/Lib/test/test_zoneinfo/test_zoneinfo.py b/Lib/test/test_zoneinfo/test_zoneinfo.py index 05db03abf25e9..fe2c380c51442 100644 --- a/Lib/test/test_zoneinfo/test_zoneinfo.py +++ b/Lib/test/test_zoneinfo/test_zoneinfo.py @@ -16,6 +16,7 @@ import tempfile import unittest from datetime import date, datetime, time, timedelta, timezone +from functools import cached_property from . import _support as test_support from ._support import ( @@ -72,10 +73,18 @@ class TzPathUserMixin: def tzpath(self): # pragma: nocover return None + @property + def block_tzdata(self): + return True + def setUp(self): with contextlib.ExitStack() as stack: stack.enter_context( - self.tzpath_context(self.tzpath, lock=TZPATH_TEST_LOCK) + self.tzpath_context( + self.tzpath, + block_tzdata=self.block_tzdata, + lock=TZPATH_TEST_LOCK, + ) ) self.addCleanup(stack.pop_all().close) @@ -522,6 +531,10 @@ class TZDataTests(ZoneInfoTest): def tzpath(self): return [] + @property + def block_tzdata(self): + return False + def zone_from_key(self, key): return self.klass(key=key) @@ -1628,6 +1641,32 @@ class CTzPathTest(TzPathTest): class TestModule(ZoneInfoTestBase): module = py_zoneinfo + @property + def zoneinfo_data(self): + return ZONEINFO_DATA + + @cached_property + def _UTC_bytes(self): + zone_file = self.zoneinfo_data.path_from_key("UTC") + with open(zone_file, "rb") as f: + return f.read() + + def touch_zone(self, key, tz_root): + """Creates a valid TZif file at key under the zoneinfo root tz_root. + + tz_root must exist, but all folders below that will be created. 
+ """ + if not os.path.exists(tz_root): + raise FileNotFoundError(f"{tz_root} does not exist.") + + root_dir, *tail = key.rsplit("/", 1) + if tail: # If there's no tail, then the first component isn't a dir + os.makedirs(os.path.join(tz_root, root_dir), exist_ok=True) + + zonefile_path = os.path.join(tz_root, key) + with open(zonefile_path, "wb") as f: + f.write(self._UTC_bytes) + def test_getattr_error(self): with self.assertRaises(AttributeError): self.module.NOATTRIBUTE @@ -1648,6 +1687,79 @@ def test_dir_unique(self): self.assertCountEqual(module_dir, module_unique) + def test_available_timezones(self): + with self.tzpath_context([self.zoneinfo_data.tzpath]): + self.assertTrue(self.zoneinfo_data.keys) # Sanity check + + available_keys = self.module.available_timezones() + zoneinfo_keys = set(self.zoneinfo_data.keys) + + # If tzdata is not present, zoneinfo_keys == available_keys, + # otherwise it should be a subset. + union = zoneinfo_keys & available_keys + self.assertEqual(zoneinfo_keys, union) + + def test_available_timezones_weirdzone(self): + with tempfile.TemporaryDirectory() as td: + # Make a fictional zone at "Mars/Olympus_Mons" + self.touch_zone("Mars/Olympus_Mons", td) + + with self.tzpath_context([td]): + available_keys = self.module.available_timezones() + self.assertIn("Mars/Olympus_Mons", available_keys) + + def test_folder_exclusions(self): + expected = { + "America/Los_Angeles", + "America/Santiago", + "America/Indiana/Indianapolis", + "UTC", + "Europe/Paris", + "Europe/London", + "Asia/Tokyo", + "Australia/Sydney", + } + + base_tree = list(expected) + posix_tree = [f"posix/{x}" for x in base_tree] + right_tree = [f"right/{x}" for x in base_tree] + + cases = [ + ("base_tree", base_tree), + ("base_and_posix", base_tree + posix_tree), + ("base_and_right", base_tree + right_tree), + ("all_trees", base_tree + right_tree + posix_tree), + ] + + with tempfile.TemporaryDirectory() as td: + for case_name, tree in cases: + tz_root = os.path.join(td, 
case_name) + os.mkdir(tz_root) + + for key in tree: + self.touch_zone(key, tz_root) + + with self.tzpath_context([tz_root]): + with self.subTest(case_name): + actual = self.module.available_timezones() + self.assertEqual(actual, expected) + + def test_exclude_posixrules(self): + expected = { + "America/New_York", + "Europe/London", + } + + tree = list(expected) + ["posixrules"] + + with tempfile.TemporaryDirectory() as td: + for key in tree: + self.touch_zone(key, td) + + with self.tzpath_context([td]): + actual = self.module.available_timezones() + self.assertEqual(actual, expected) + class CTestModule(TestModule): module = c_zoneinfo diff --git a/Lib/zoneinfo/__init__.py b/Lib/zoneinfo/__init__.py index 81a2d5ea97be0..f5510ee049751 100644 --- a/Lib/zoneinfo/__init__.py +++ b/Lib/zoneinfo/__init__.py @@ -1,6 +1,7 @@ __all__ = [ "ZoneInfo", "reset_tzpath", + "available_timezones", "TZPATH", "ZoneInfoNotFoundError", "InvalidTZPathWarning", @@ -15,6 +16,7 @@ from ._zoneinfo import ZoneInfo reset_tzpath = _tzpath.reset_tzpath +available_timezones = _tzpath.available_timezones InvalidTZPathWarning = _tzpath.InvalidTZPathWarning diff --git a/Lib/zoneinfo/_tzpath.py b/Lib/zoneinfo/_tzpath.py index 8cff0b171bf32..c4c671d30dbdd 100644 --- a/Lib/zoneinfo/_tzpath.py +++ b/Lib/zoneinfo/_tzpath.py @@ -102,6 +102,71 @@ def _validate_tzfile_path(path, _base=_TEST_PATH): del _TEST_PATH +def available_timezones(): + """Returns a set containing all available time zones. + + .. caution:: + + This may attempt to open a large number of files, since the best way to + determine if a given file on the time zone search path is to open it + and check for the "magic string" at the beginning. + """ + from importlib import resources + + valid_zones = set() + + # Start with loading from the tzdata package if it exists: this has a + # pre-assembled list of zones that only requires opening one file. 
+ try: + with resources.open_text("tzdata", "zones") as f: + for zone in f: + zone = zone.strip() + if zone: + valid_zones.add(zone) + except (ImportError, FileNotFoundError): + pass + + def valid_key(fpath): + try: + with open(fpath, "rb") as f: + return f.read(4) == b"TZif" + except Exception: # pragma: nocover + return False + + for tz_root in TZPATH: + if not os.path.exists(tz_root): + continue + + for root, dirnames, files in os.walk(tz_root): + if root == tz_root: + # right/ and posix/ are special directories and shouldn't be + # included in the output of available zones + if "right" in dirnames: + dirnames.remove("right") + if "posix" in dirnames: + dirnames.remove("posix") + + for file in files: + fpath = os.path.join(root, file) + + key = os.path.relpath(fpath, start=tz_root) + if os.sep != "/": # pragma: nocover + key = key.replace(os.sep, "/") + + if not key or key in valid_zones: + continue + + if valid_key(fpath): + valid_zones.add(key) + + if "posixrules" in valid_zones: + # posixrules is a special symlink-only time zone where it exists, it + # should not be included in the output + valid_zones.remove("posixrules") + + return valid_zones + + class InvalidTZPathWarning(RuntimeWarning): """Warning raised if an invalid path is specified in PYTHONTZPATH.""" diff --git a/Misc/NEWS.d/next/Library/2020-05-17-14-00-12.bpo-40536.FCpoRA.rst b/Misc/NEWS.d/next/Library/2020-05-17-14-00-12.bpo-40536.FCpoRA.rst new file mode 100644 index 0000000000000..ba7773bf61fa6 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-17-14-00-12.bpo-40536.FCpoRA.rst @@ -0,0 +1,2 @@ +Added the :func:`~zoneinfo.available_timezones` function to the +:mod:`zoneinfo` module. Patch by Paul Ganssle. 
From webhook-mailer at python.org Sun May 17 23:38:40 2020 From: webhook-mailer at python.org (Shantanu) Date: Mon, 18 May 2020 03:38:40 -0000 Subject: [Python-checkins] bpo-40665: Use Argument Clinic for the bisect module (GH-20163) Message-ID: https://github.com/python/cpython/commit/3a855b26aed02abf87fc1163ad0d564dc3da1ea3 commit: 3a855b26aed02abf87fc1163ad0d564dc3da1ea3 branch: master author: Shantanu committer: GitHub date: 2020-05-17T20:38:35-07:00 summary: bpo-40665: Use Argument Clinic for the bisect module (GH-20163) files: A Misc/NEWS.d/next/Library/2020-05-17-21-56-38.bpo-40665.msB7u5.rst A Modules/clinic/_bisectmodule.c.h M Modules/_bisectmodule.c diff --git a/Misc/NEWS.d/next/Library/2020-05-17-21-56-38.bpo-40665.msB7u5.rst b/Misc/NEWS.d/next/Library/2020-05-17-21-56-38.bpo-40665.msB7u5.rst new file mode 100644 index 0000000000000..160b2ca75d31c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-17-21-56-38.bpo-40665.msB7u5.rst @@ -0,0 +1 @@ +Convert :mod:`bisect` to use Argument Clinic. \ No newline at end of file diff --git a/Modules/_bisectmodule.c b/Modules/_bisectmodule.c index 461a11f5099db..82d800d9a8790 100644 --- a/Modules/_bisectmodule.c +++ b/Modules/_bisectmodule.c @@ -6,6 +6,13 @@ Converted to C by Dmitry Vasiliev (dima at hlabs.spb.ru). 
#define PY_SSIZE_T_CLEAN #include "Python.h" +/*[clinic input] +module _bisect +[clinic start generated code]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=4d56a2b2033b462b]*/ + +#include "clinic/_bisectmodule.c.h" + _Py_IDENTIFIER(insert); static inline Py_ssize_t @@ -44,69 +51,63 @@ internal_bisect_right(PyObject *list, PyObject *item, Py_ssize_t lo, Py_ssize_t return lo; } -static PyObject * -bisect_right(PyObject *self, PyObject *args, PyObject *kw) +/*[clinic input] +_bisect.bisect_right -> Py_ssize_t + + a: object + x: object + lo: Py_ssize_t = 0 + hi: Py_ssize_t(c_default='-1', accept={int, NoneType}) = None + +Return the index where to insert item x in list a, assuming a is sorted. + +The return value i is such that all e in a[:i] have e <= x, and all e in +a[i:] have e > x. So if x already appears in the list, i points just +beyond the rightmost x already there + +Optional args lo (default 0) and hi (default len(a)) bound the +slice of a to be searched. +[clinic start generated code]*/ + +static Py_ssize_t +_bisect_bisect_right_impl(PyObject *module, PyObject *a, PyObject *x, + Py_ssize_t lo, Py_ssize_t hi) +/*[clinic end generated code: output=419e150cf1d2a235 input=e72212b282c83375]*/ { - PyObject *list, *item; - Py_ssize_t lo = 0; - Py_ssize_t hi = -1; - Py_ssize_t index; - static char *keywords[] = {"a", "x", "lo", "hi", NULL}; - - if (kw == NULL && PyTuple_GET_SIZE(args) == 2) { - list = PyTuple_GET_ITEM(args, 0); - item = PyTuple_GET_ITEM(args, 1); - } - else { - if (!PyArg_ParseTupleAndKeywords(args, kw, "OO|nn:bisect_right", - keywords, &list, &item, &lo, &hi)) - return NULL; - } - index = internal_bisect_right(list, item, lo, hi); - if (index < 0) - return NULL; - return PyLong_FromSsize_t(index); + return internal_bisect_right(a, x, lo, hi); } -PyDoc_STRVAR(bisect_right_doc, -"bisect_right(a, x[, lo[, hi]]) -> index\n\ -\n\ -Return the index where to insert item x in list a, assuming a is sorted.\n\ -\n\ -The return value i is 
such that all e in a[:i] have e <= x, and all e in\n\ -a[i:] have e > x. So if x already appears in the list, i points just\n\ -beyond the rightmost x already there\n\ -\n\ -Optional args lo (default 0) and hi (default len(a)) bound the\n\ -slice of a to be searched.\n"); +/*[clinic input] +_bisect.insort_right + + a: object + x: object + lo: Py_ssize_t = 0 + hi: Py_ssize_t(c_default='-1', accept={int, NoneType}) = None + +Insert item x in list a, and keep it sorted assuming a is sorted. + +If x is already in a, insert it to the right of the rightmost x. + +Optional args lo (default 0) and hi (default len(a)) bound the +slice of a to be searched. +[clinic start generated code]*/ static PyObject * -insort_right(PyObject *self, PyObject *args, PyObject *kw) +_bisect_insort_right_impl(PyObject *module, PyObject *a, PyObject *x, + Py_ssize_t lo, Py_ssize_t hi) +/*[clinic end generated code: output=c2caa3d4cd02035a input=d1c45bfa68182669]*/ { - PyObject *list, *item, *result; - Py_ssize_t lo = 0; - Py_ssize_t hi = -1; - Py_ssize_t index; - static char *keywords[] = {"a", "x", "lo", "hi", NULL}; - - if (kw == NULL && PyTuple_GET_SIZE(args) == 2) { - list = PyTuple_GET_ITEM(args, 0); - item = PyTuple_GET_ITEM(args, 1); - } - else { - if (!PyArg_ParseTupleAndKeywords(args, kw, "OO|nn:insort_right", - keywords, &list, &item, &lo, &hi)) - return NULL; - } - index = internal_bisect_right(list, item, lo, hi); + PyObject *result; + Py_ssize_t index = internal_bisect_right(a, x, lo, hi); if (index < 0) return NULL; - if (PyList_CheckExact(list)) { - if (PyList_Insert(list, index, item) < 0) + if (PyList_CheckExact(a)) { + if (PyList_Insert(a, index, x) < 0) return NULL; } else { - result = _PyObject_CallMethodId(list, &PyId_insert, "nO", index, item); + result = _PyObject_CallMethodId(a, &PyId_insert, "nO", index, x); if (result == NULL) return NULL; Py_DECREF(result); @@ -115,16 +116,6 @@ insort_right(PyObject *self, PyObject *args, PyObject *kw) Py_RETURN_NONE; } 
-PyDoc_STRVAR(insort_right_doc, -"insort_right(a, x[, lo[, hi]])\n\ -\n\ -Insert item x in list a, and keep it sorted assuming a is sorted.\n\ -\n\ -If x is already in a, insert it to the right of the rightmost x.\n\ -\n\ -Optional args lo (default 0) and hi (default len(a)) bound the\n\ -slice of a to be searched.\n"); - static inline Py_ssize_t internal_bisect_left(PyObject *list, PyObject *item, Py_ssize_t lo, Py_ssize_t hi) { @@ -161,67 +152,64 @@ internal_bisect_left(PyObject *list, PyObject *item, Py_ssize_t lo, Py_ssize_t h return lo; } -static PyObject * -bisect_left(PyObject *self, PyObject *args, PyObject *kw) + +/*[clinic input] +_bisect.bisect_left -> Py_ssize_t + + a: object + x: object + lo: Py_ssize_t = 0 + hi: Py_ssize_t(c_default='-1', accept={int, NoneType}) = None + +Return the index where to insert item x in list a, assuming a is sorted. + +The return value i is such that all e in a[:i] have e < x, and all e in +a[i:] have e >= x. So if x already appears in the list, i points just +before the leftmost x already there. + +Optional args lo (default 0) and hi (default len(a)) bound the +slice of a to be searched. 
+[clinic start generated code]*/ + +static Py_ssize_t +_bisect_bisect_left_impl(PyObject *module, PyObject *a, PyObject *x, + Py_ssize_t lo, Py_ssize_t hi) +/*[clinic end generated code: output=af82168bc2856f24 input=2bd90f34afe5609f]*/ { - PyObject *list, *item; - Py_ssize_t lo = 0; - Py_ssize_t hi = -1; - Py_ssize_t index; - static char *keywords[] = {"a", "x", "lo", "hi", NULL}; - - if (kw == NULL && PyTuple_GET_SIZE(args) == 2) { - list = PyTuple_GET_ITEM(args, 0); - item = PyTuple_GET_ITEM(args, 1); - } - else { - if (!PyArg_ParseTupleAndKeywords(args, kw, "OO|nn:bisect_left", - keywords, &list, &item, &lo, &hi)) - return NULL; - } - index = internal_bisect_left(list, item, lo, hi); - if (index < 0) - return NULL; - return PyLong_FromSsize_t(index); + return internal_bisect_left(a, x, lo, hi); } -PyDoc_STRVAR(bisect_left_doc, -"bisect_left(a, x[, lo[, hi]]) -> index\n\ -\n\ -Return the index where to insert item x in list a, assuming a is sorted.\n\ -\n\ -The return value i is such that all e in a[:i] have e < x, and all e in\n\ -a[i:] have e >= x. So if x already appears in the list, i points just\n\ -before the leftmost x already there.\n\ -\n\ -Optional args lo (default 0) and hi (default len(a)) bound the\n\ -slice of a to be searched.\n"); + +/*[clinic input] +_bisect.insort_left + + a: object + x: object + lo: Py_ssize_t = 0 + hi: Py_ssize_t(c_default='-1', accept={int, NoneType}) = None + +Insert item x in list a, and keep it sorted assuming a is sorted. + +If x is already in a, insert it to the left of the leftmost x. + +Optional args lo (default 0) and hi (default len(a)) bound the +slice of a to be searched. 
+[clinic start generated code]*/ static PyObject * -insort_left(PyObject *self, PyObject *args, PyObject *kw) +_bisect_insort_left_impl(PyObject *module, PyObject *a, PyObject *x, + Py_ssize_t lo, Py_ssize_t hi) +/*[clinic end generated code: output=9e8356c0844a182b input=bc4583308bce00cc]*/ { - PyObject *list, *item, *result; - Py_ssize_t lo = 0; - Py_ssize_t hi = -1; - Py_ssize_t index; - static char *keywords[] = {"a", "x", "lo", "hi", NULL}; - - if (kw == NULL && PyTuple_GET_SIZE(args) == 2) { - list = PyTuple_GET_ITEM(args, 0); - item = PyTuple_GET_ITEM(args, 1); - } else { - if (!PyArg_ParseTupleAndKeywords(args, kw, "OO|nn:insort_left", - keywords, &list, &item, &lo, &hi)) - return NULL; - } - index = internal_bisect_left(list, item, lo, hi); + PyObject *result; + Py_ssize_t index = internal_bisect_left(a, x, lo, hi); if (index < 0) return NULL; - if (PyList_CheckExact(list)) { - if (PyList_Insert(list, index, item) < 0) + if (PyList_CheckExact(a)) { + if (PyList_Insert(a, index, x) < 0) return NULL; } else { - result = _PyObject_CallMethodId(list, &PyId_insert, "nO", index, item); + result = _PyObject_CallMethodId(a, &PyId_insert, "nO", index, x); if (result == NULL) return NULL; Py_DECREF(result); @@ -230,25 +218,11 @@ insort_left(PyObject *self, PyObject *args, PyObject *kw) Py_RETURN_NONE; } -PyDoc_STRVAR(insort_left_doc, -"insort_left(a, x[, lo[, hi]])\n\ -\n\ -Insert item x in list a, and keep it sorted assuming a is sorted.\n\ -\n\ -If x is already in a, insert it to the left of the leftmost x.\n\ -\n\ -Optional args lo (default 0) and hi (default len(a)) bound the\n\ -slice of a to be searched.\n"); - static PyMethodDef bisect_methods[] = { - {"bisect_right", (PyCFunction)(void(*)(void))bisect_right, - METH_VARARGS|METH_KEYWORDS, bisect_right_doc}, - {"insort_right", (PyCFunction)(void(*)(void))insort_right, - METH_VARARGS|METH_KEYWORDS, insort_right_doc}, - {"bisect_left", (PyCFunction)(void(*)(void))bisect_left, - METH_VARARGS|METH_KEYWORDS, 
bisect_left_doc}, - {"insort_left", (PyCFunction)(void(*)(void))insort_left, - METH_VARARGS|METH_KEYWORDS, insort_left_doc}, + _BISECT_BISECT_RIGHT_METHODDEF + _BISECT_INSORT_RIGHT_METHODDEF + _BISECT_BISECT_LEFT_METHODDEF + _BISECT_INSORT_LEFT_METHODDEF {NULL, NULL} /* sentinel */ }; diff --git a/Modules/clinic/_bisectmodule.c.h b/Modules/clinic/_bisectmodule.c.h new file mode 100644 index 0000000000000..80ab7048f1428 --- /dev/null +++ b/Modules/clinic/_bisectmodule.c.h @@ -0,0 +1,306 @@ +/*[clinic input] +preserve +[clinic start generated code]*/ + +PyDoc_STRVAR(_bisect_bisect_right__doc__, +"bisect_right($module, /, a, x, lo=0, hi=None)\n" +"--\n" +"\n" +"Return the index where to insert item x in list a, assuming a is sorted.\n" +"\n" +"The return value i is such that all e in a[:i] have e <= x, and all e in\n" +"a[i:] have e > x. So if x already appears in the list, i points just\n" +"beyond the rightmost x already there\n" +"\n" +"Optional args lo (default 0) and hi (default len(a)) bound the\n" +"slice of a to be searched."); + +#define _BISECT_BISECT_RIGHT_METHODDEF \ + {"bisect_right", (PyCFunction)(void(*)(void))_bisect_bisect_right, METH_FASTCALL|METH_KEYWORDS, _bisect_bisect_right__doc__}, + +static Py_ssize_t +_bisect_bisect_right_impl(PyObject *module, PyObject *a, PyObject *x, + Py_ssize_t lo, Py_ssize_t hi); + +static PyObject * +_bisect_bisect_right(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + static const char * const _keywords[] = {"a", "x", "lo", "hi", NULL}; + static _PyArg_Parser _parser = {NULL, _keywords, "bisect_right", 0}; + PyObject *argsbuf[4]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 2; + PyObject *a; + PyObject *x; + Py_ssize_t lo = 0; + Py_ssize_t hi = -1; + Py_ssize_t _return_value; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 4, 0, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + x = args[1]; + if (!noptargs) { + goto skip_optional_pos; + } + if (args[2]) { + if (PyFloat_Check(args[2])) { + PyErr_SetString(PyExc_TypeError, + "integer argument expected, got float" ); + goto exit; + } + { + Py_ssize_t ival = -1; + PyObject *iobj = PyNumber_Index(args[2]); + if (iobj != NULL) { + ival = PyLong_AsSsize_t(iobj); + Py_DECREF(iobj); + } + if (ival == -1 && PyErr_Occurred()) { + goto exit; + } + lo = ival; + } + if (!--noptargs) { + goto skip_optional_pos; + } + } + if (!_Py_convert_optional_to_ssize_t(args[3], &hi)) { + goto exit; + } +skip_optional_pos: + _return_value = _bisect_bisect_right_impl(module, a, x, lo, hi); + if ((_return_value == -1) && PyErr_Occurred()) { + goto exit; + } + return_value = PyLong_FromSsize_t(_return_value); + +exit: + return return_value; +} + +PyDoc_STRVAR(_bisect_insort_right__doc__, +"insort_right($module, /, a, x, lo=0, hi=None)\n" +"--\n" +"\n" +"Insert item x in list a, and keep it sorted assuming a is sorted.\n" +"\n" +"If x is already in a, insert it to the right of the rightmost x.\n" +"\n" +"Optional args lo (default 0) and hi (default len(a)) bound the\n" +"slice of a to be searched."); + +#define _BISECT_INSORT_RIGHT_METHODDEF \ + {"insort_right", (PyCFunction)(void(*)(void))_bisect_insort_right, METH_FASTCALL|METH_KEYWORDS, _bisect_insort_right__doc__}, + +static PyObject * +_bisect_insort_right_impl(PyObject *module, PyObject *a, PyObject *x, + Py_ssize_t lo, Py_ssize_t hi); + +static PyObject * +_bisect_insort_right(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + static const char * const _keywords[] = {"a", "x", "lo", "hi", NULL}; + static _PyArg_Parser _parser = 
{NULL, _keywords, "insort_right", 0}; + PyObject *argsbuf[4]; + Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 2; + PyObject *a; + PyObject *x; + Py_ssize_t lo = 0; + Py_ssize_t hi = -1; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 4, 0, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + x = args[1]; + if (!noptargs) { + goto skip_optional_pos; + } + if (args[2]) { + if (PyFloat_Check(args[2])) { + PyErr_SetString(PyExc_TypeError, + "integer argument expected, got float" ); + goto exit; + } + { + Py_ssize_t ival = -1; + PyObject *iobj = PyNumber_Index(args[2]); + if (iobj != NULL) { + ival = PyLong_AsSsize_t(iobj); + Py_DECREF(iobj); + } + if (ival == -1 && PyErr_Occurred()) { + goto exit; + } + lo = ival; + } + if (!--noptargs) { + goto skip_optional_pos; + } + } + if (!_Py_convert_optional_to_ssize_t(args[3], &hi)) { + goto exit; + } +skip_optional_pos: + return_value = _bisect_insort_right_impl(module, a, x, lo, hi); + +exit: + return return_value; +} + +PyDoc_STRVAR(_bisect_bisect_left__doc__, +"bisect_left($module, /, a, x, lo=0, hi=None)\n" +"--\n" +"\n" +"Return the index where to insert item x in list a, assuming a is sorted.\n" +"\n" +"The return value i is such that all e in a[:i] have e < x, and all e in\n" +"a[i:] have e >= x. 
So if x already appears in the list, i points just\n" +"before the leftmost x already there.\n" +"\n" +"Optional args lo (default 0) and hi (default len(a)) bound the\n" +"slice of a to be searched."); + +#define _BISECT_BISECT_LEFT_METHODDEF \ + {"bisect_left", (PyCFunction)(void(*)(void))_bisect_bisect_left, METH_FASTCALL|METH_KEYWORDS, _bisect_bisect_left__doc__}, + +static Py_ssize_t +_bisect_bisect_left_impl(PyObject *module, PyObject *a, PyObject *x, + Py_ssize_t lo, Py_ssize_t hi); + +static PyObject * +_bisect_bisect_left(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + static const char * const _keywords[] = {"a", "x", "lo", "hi", NULL}; + static _PyArg_Parser _parser = {NULL, _keywords, "bisect_left", 0}; + PyObject *argsbuf[4]; + Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 2; + PyObject *a; + PyObject *x; + Py_ssize_t lo = 0; + Py_ssize_t hi = -1; + Py_ssize_t _return_value; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 4, 0, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + x = args[1]; + if (!noptargs) { + goto skip_optional_pos; + } + if (args[2]) { + if (PyFloat_Check(args[2])) { + PyErr_SetString(PyExc_TypeError, + "integer argument expected, got float" ); + goto exit; + } + { + Py_ssize_t ival = -1; + PyObject *iobj = PyNumber_Index(args[2]); + if (iobj != NULL) { + ival = PyLong_AsSsize_t(iobj); + Py_DECREF(iobj); + } + if (ival == -1 && PyErr_Occurred()) { + goto exit; + } + lo = ival; + } + if (!--noptargs) { + goto skip_optional_pos; + } + } + if (!_Py_convert_optional_to_ssize_t(args[3], &hi)) { + goto exit; + } +skip_optional_pos: + _return_value = _bisect_bisect_left_impl(module, a, x, lo, hi); + if ((_return_value == -1) && PyErr_Occurred()) { + goto exit; + } + return_value = PyLong_FromSsize_t(_return_value); + +exit: + return return_value; +} + +PyDoc_STRVAR(_bisect_insort_left__doc__, 
+"insort_left($module, /, a, x, lo=0, hi=None)\n" +"--\n" +"\n" +"Insert item x in list a, and keep it sorted assuming a is sorted.\n" +"\n" +"If x is already in a, insert it to the left of the leftmost x.\n" +"\n" +"Optional args lo (default 0) and hi (default len(a)) bound the\n" +"slice of a to be searched."); + +#define _BISECT_INSORT_LEFT_METHODDEF \ + {"insort_left", (PyCFunction)(void(*)(void))_bisect_insort_left, METH_FASTCALL|METH_KEYWORDS, _bisect_insort_left__doc__}, + +static PyObject * +_bisect_insort_left_impl(PyObject *module, PyObject *a, PyObject *x, + Py_ssize_t lo, Py_ssize_t hi); + +static PyObject * +_bisect_insort_left(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + static const char * const _keywords[] = {"a", "x", "lo", "hi", NULL}; + static _PyArg_Parser _parser = {NULL, _keywords, "insort_left", 0}; + PyObject *argsbuf[4]; + Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 2; + PyObject *a; + PyObject *x; + Py_ssize_t lo = 0; + Py_ssize_t hi = -1; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 4, 0, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + x = args[1]; + if (!noptargs) { + goto skip_optional_pos; + } + if (args[2]) { + if (PyFloat_Check(args[2])) { + PyErr_SetString(PyExc_TypeError, + "integer argument expected, got float" ); + goto exit; + } + { + Py_ssize_t ival = -1; + PyObject *iobj = PyNumber_Index(args[2]); + if (iobj != NULL) { + ival = PyLong_AsSsize_t(iobj); + Py_DECREF(iobj); + } + if (ival == -1 && PyErr_Occurred()) { + goto exit; + } + lo = ival; + } + if (!--noptargs) { + goto skip_optional_pos; + } + } + if (!_Py_convert_optional_to_ssize_t(args[3], &hi)) { + goto exit; + } +skip_optional_pos: + return_value = _bisect_insort_left_impl(module, a, x, lo, hi); + +exit: + return return_value; +} +/*[clinic end generated code: output=bcbd6c77331a08f0 input=a9049054013a1b77]*/ From webhook-mailer 
at python.org Sun May 17 23:50:59 2020 From: webhook-mailer at python.org (qudongfang) Date: Mon, 18 May 2020 03:50:59 -0000 Subject: [Python-checkins] bpo-40651: Improve LRU recipe in the OrderedDict documentation (GH-#20139) Message-ID: https://github.com/python/cpython/commit/bb8635cc3bc3dd65996803849ee1a91cfbebae9c commit: bb8635cc3bc3dd65996803849ee1a91cfbebae9c branch: master author: qudongfang committer: GitHub date: 2020-05-17T20:50:51-07:00 summary: bpo-40651: Improve LRU recipe in the OrderedDict documentation (GH-#20139) files: M Doc/library/collections.rst diff --git a/Doc/library/collections.rst b/Doc/library/collections.rst index c9533a3cb8f48..549ac1bccadf5 100644 --- a/Doc/library/collections.rst +++ b/Doc/library/collections.rst @@ -1161,6 +1161,8 @@ variants of :func:`functools.lru_cache`:: return value def __setitem__(self, key, value): + if key in self: + self.move_to_end(key) super().__setitem__(key, value) if len(self) > self.maxsize: oldest = next(iter(self)) From webhook-mailer at python.org Mon May 18 01:08:01 2020 From: webhook-mailer at python.org (Shantanu) Date: Mon, 18 May 2020 05:08:01 -0000 Subject: [Python-checkins] bpo-13601: Mention stderr's line buffering in What's New (GH-20168) Message-ID: https://github.com/python/cpython/commit/d17f3d8315a3a775ab0807fc80acf92b1bd682f8 commit: d17f3d8315a3a775ab0807fc80acf92b1bd682f8 branch: master author: Shantanu committer: GitHub date: 2020-05-17T22:07:57-07:00 summary: bpo-13601: Mention stderr's line buffering in What's New (GH-20168) files: M Doc/whatsnew/3.9.rst diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index 479c33b4a7fa1..66e7c6461ecd7 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -542,6 +542,10 @@ most platforms. On Fedora and SuSE, it is equal to ``"lib64"`` on 64-bit platforms. (Contributed by Jan Mat?jek, Mat?j Cepl, Charalampos Stratakis and Victor Stinner in :issue:`1294959`.) 
+Previously, :attr:`sys.stderr` was block-buffered when non-interactive. Now +``stderr`` defaults to always being line-buffered. +(Contributed by Jendrik Seipp in :issue:`13601`.) + typing ------ From webhook-mailer at python.org Mon May 18 01:47:39 2020 From: webhook-mailer at python.org (Chris Jerdonek) Date: Mon, 18 May 2020 05:47:39 -0000 Subject: [Python-checkins] bpo-31033: Improve the traceback for cancelled asyncio tasks (GH-19951) Message-ID: https://github.com/python/cpython/commit/da742ba826721da84140abc785856d4ccc2d787f commit: da742ba826721da84140abc785856d4ccc2d787f branch: master author: Chris Jerdonek committer: GitHub date: 2020-05-17T22:47:31-07:00 summary: bpo-31033: Improve the traceback for cancelled asyncio tasks (GH-19951) When an asyncio.Task is cancelled, the exception traceback now starts with where the task was first interrupted. Previously, the traceback only had "depth one." files: A Misc/NEWS.d/next/Library/2020-05-06-02-33-00.bpo-31033.aX12pw.rst M Include/internal/pycore_pyerrors.h M Lib/asyncio/futures.py M Lib/asyncio/tasks.py M Lib/test/test_asyncio/test_tasks.py M Modules/_asynciomodule.c M Modules/clinic/_asynciomodule.c.h M Objects/genobject.c M Python/errors.c M setup.py diff --git a/Include/internal/pycore_pyerrors.h b/Include/internal/pycore_pyerrors.h index bae10561c9f5c..3290a37051e0f 100644 --- a/Include/internal/pycore_pyerrors.h +++ b/Include/internal/pycore_pyerrors.h @@ -14,6 +14,20 @@ static inline PyObject* _PyErr_Occurred(PyThreadState *tstate) return tstate->curexc_type; } +static inline void _PyErr_ClearExcState(_PyErr_StackItem *exc_state) +{ + PyObject *t, *v, *tb; + t = exc_state->exc_type; + v = exc_state->exc_value; + tb = exc_state->exc_traceback; + exc_state->exc_type = NULL; + exc_state->exc_value = NULL; + exc_state->exc_traceback = NULL; + Py_XDECREF(t); + Py_XDECREF(v); + Py_XDECREF(tb); +} + PyAPI_FUNC(void) _PyErr_Fetch( PyThreadState *tstate, @@ -36,6 +50,9 @@ PyAPI_FUNC(void) _PyErr_SetObject( 
PyObject *type, PyObject *value); +PyAPI_FUNC(void) _PyErr_ChainStackItem( + _PyErr_StackItem *exc_state); + PyAPI_FUNC(void) _PyErr_Clear(PyThreadState *tstate); PyAPI_FUNC(void) _PyErr_SetNone(PyThreadState *tstate, PyObject *exception); diff --git a/Lib/asyncio/futures.py b/Lib/asyncio/futures.py index 889f3e6eb86b0..bed4da52fd4d9 100644 --- a/Lib/asyncio/futures.py +++ b/Lib/asyncio/futures.py @@ -52,6 +52,8 @@ class Future: _loop = None _source_traceback = None _cancel_message = None + # A saved CancelledError for later chaining as an exception context. + _cancelled_exc = None # This field is used for a dual purpose: # - Its presence is a marker to declare that a class implements @@ -124,6 +126,21 @@ def get_loop(self): raise RuntimeError("Future object is not initialized.") return loop + def _make_cancelled_error(self): + """Create the CancelledError to raise if the Future is cancelled. + + This should only be called once when handling a cancellation since + it erases the saved context exception value. + """ + if self._cancel_message is None: + exc = exceptions.CancelledError() + else: + exc = exceptions.CancelledError(self._cancel_message) + exc.__context__ = self._cancelled_exc + # Remove the reference since we don't need this anymore. + self._cancelled_exc = None + return exc + def cancel(self, msg=None): """Cancel the future and schedule callbacks. @@ -175,9 +192,8 @@ def result(self): the future is done and has an exception set, this exception is raised. """ if self._state == _CANCELLED: - raise exceptions.CancelledError( - '' if self._cancel_message is None else self._cancel_message) - + exc = self._make_cancelled_error() + raise exc if self._state != _FINISHED: raise exceptions.InvalidStateError('Result is not ready.') self.__log_traceback = False @@ -194,8 +210,8 @@ def exception(self): InvalidStateError. 
""" if self._state == _CANCELLED: - raise exceptions.CancelledError( - '' if self._cancel_message is None else self._cancel_message) + exc = self._make_cancelled_error() + raise exc if self._state != _FINISHED: raise exceptions.InvalidStateError('Exception is not set.') self.__log_traceback = False diff --git a/Lib/asyncio/tasks.py b/Lib/asyncio/tasks.py index a3a0a33ee03da..21b98b6647bd9 100644 --- a/Lib/asyncio/tasks.py +++ b/Lib/asyncio/tasks.py @@ -270,8 +270,7 @@ def __step(self, exc=None): f'_step(): already done: {self!r}, {exc!r}') if self._must_cancel: if not isinstance(exc, exceptions.CancelledError): - exc = exceptions.CancelledError('' - if self._cancel_message is None else self._cancel_message) + exc = self._make_cancelled_error() self._must_cancel = False coro = self._coro self._fut_waiter = None @@ -293,11 +292,9 @@ def __step(self, exc=None): else: super().set_result(exc.value) except exceptions.CancelledError as exc: - if exc.args: - cancel_msg = exc.args[0] - else: - cancel_msg = None - super().cancel(msg=cancel_msg) # I.e., Future.cancel(self). + # Save the original exception so we can chain it later. + self._cancelled_exc = exc + super().cancel() # I.e., Future.cancel(self). except (KeyboardInterrupt, SystemExit) as exc: super().set_exception(exc) raise @@ -787,8 +784,7 @@ def _done_callback(fut): # Check if 'fut' is cancelled first, as # 'fut.exception()' will *raise* a CancelledError # instead of returning it. - exc = exceptions.CancelledError('' - if fut._cancel_message is None else fut._cancel_message) + exc = fut._make_cancelled_error() outer.set_exception(exc) return else: @@ -804,9 +800,12 @@ def _done_callback(fut): for fut in children: if fut.cancelled(): - # Check if 'fut' is cancelled first, as - # 'fut.exception()' will *raise* a CancelledError - # instead of returning it. + # Check if 'fut' is cancelled first, as 'fut.exception()' + # will *raise* a CancelledError instead of returning it. 
+ # Also, since we're adding the exception return value + # to 'results' instead of raising it, don't bother + # setting __context__. This also lets us preserve + # calling '_make_cancelled_error()' at most once. res = exceptions.CancelledError( '' if fut._cancel_message is None else fut._cancel_message) @@ -820,8 +819,7 @@ def _done_callback(fut): # If gather is being cancelled we must propagate the # cancellation regardless of *return_exceptions* argument. # See issue 32684. - exc = exceptions.CancelledError('' - if fut._cancel_message is None else fut._cancel_message) + exc = fut._make_cancelled_error() outer.set_exception(exc) else: outer.set_result(results) diff --git a/Lib/test/test_asyncio/test_tasks.py b/Lib/test/test_asyncio/test_tasks.py index 65bee526d2eca..63968e2a17894 100644 --- a/Lib/test/test_asyncio/test_tasks.py +++ b/Lib/test/test_asyncio/test_tasks.py @@ -10,6 +10,7 @@ import re import sys import textwrap +import traceback import types import unittest import weakref @@ -57,6 +58,22 @@ def format_coroutine(qualname, state, src, source_traceback, generator=False): return 'coro=<%s() %s at %s>' % (qualname, state, src) +def get_innermost_context(exc): + """ + Return information about the innermost exception context in the chain. + """ + depth = 0 + while True: + context = exc.__context__ + if context is None: + break + + exc = context + depth += 1 + + return (type(exc), exc.args, depth) + + class Dummy: def __repr__(self): @@ -111,9 +128,10 @@ async def coro(): self.assertEqual(t._cancel_message, None) t.cancel('my message') + self.assertEqual(t._cancel_message, 'my message') + with self.assertRaises(asyncio.CancelledError): self.loop.run_until_complete(t) - self.assertEqual(t._cancel_message, 'my message') def test_task_cancel_message_setter(self): async def coro(): @@ -123,10 +141,8 @@ async def coro(): t._cancel_message = 'my new message' self.assertEqual(t._cancel_message, 'my new message') - # Also check that the value is used for cancel(). 
with self.assertRaises(asyncio.CancelledError): self.loop.run_until_complete(t) - self.assertEqual(t._cancel_message, 'my new message') def test_task_del_collect(self): class Evil: @@ -548,8 +564,8 @@ async def task(): def test_cancel_with_message_then_future_result(self): # Test Future.result() after calling cancel() with a message. cases = [ - ((), ('',)), - ((None,), ('',)), + ((), ()), + ((None,), ()), (('my message',), ('my message',)), # Non-string values should roundtrip. ((5,), (5,)), @@ -573,13 +589,17 @@ async def coro(): with self.assertRaises(asyncio.CancelledError) as cm: loop.run_until_complete(task) exc = cm.exception - self.assertEqual(exc.args, expected_args) + self.assertEqual(exc.args, ()) + + actual = get_innermost_context(exc) + self.assertEqual(actual, + (asyncio.CancelledError, expected_args, 2)) def test_cancel_with_message_then_future_exception(self): # Test Future.exception() after calling cancel() with a message. cases = [ - ((), ('',)), - ((None,), ('',)), + ((), ()), + ((None,), ()), (('my message',), ('my message',)), # Non-string values should roundtrip. 
((5,), (5,)), @@ -603,7 +623,11 @@ async def coro(): with self.assertRaises(asyncio.CancelledError) as cm: loop.run_until_complete(task) exc = cm.exception - self.assertEqual(exc.args, expected_args) + self.assertEqual(exc.args, ()) + + actual = get_innermost_context(exc) + self.assertEqual(actual, + (asyncio.CancelledError, expected_args, 2)) def test_cancel_with_message_before_starting_task(self): loop = asyncio.new_event_loop() @@ -623,7 +647,11 @@ async def coro(): with self.assertRaises(asyncio.CancelledError) as cm: loop.run_until_complete(task) exc = cm.exception - self.assertEqual(exc.args, ('my message',)) + self.assertEqual(exc.args, ()) + + actual = get_innermost_context(exc) + self.assertEqual(actual, + (asyncio.CancelledError, ('my message',), 2)) def test_cancel_yield(self): with self.assertWarns(DeprecationWarning): @@ -805,6 +833,66 @@ async def coro(): self.assertTrue(nested_task.cancelled()) self.assertTrue(fut.cancelled()) + def assert_text_contains(self, text, substr): + if substr not in text: + raise RuntimeError(f'text {substr!r} not found in:\n>>>{text}<<<') + + def test_cancel_traceback_for_future_result(self): + # When calling Future.result() on a cancelled task, check that the + # line of code that was interrupted is included in the traceback. + loop = asyncio.new_event_loop() + self.set_event_loop(loop) + + async def nested(): + # This will get cancelled immediately. + await asyncio.sleep(10) + + async def coro(): + task = self.new_task(loop, nested()) + await asyncio.sleep(0) + task.cancel() + await task # search target + + task = self.new_task(loop, coro()) + try: + loop.run_until_complete(task) + except asyncio.CancelledError: + tb = traceback.format_exc() + self.assert_text_contains(tb, "await asyncio.sleep(10)") + # The intermediate await should also be included. 
+ self.assert_text_contains(tb, "await task # search target") + else: + self.fail('CancelledError did not occur') + + def test_cancel_traceback_for_future_exception(self): + # When calling Future.exception() on a cancelled task, check that the + # line of code that was interrupted is included in the traceback. + loop = asyncio.new_event_loop() + self.set_event_loop(loop) + + async def nested(): + # This will get cancelled immediately. + await asyncio.sleep(10) + + async def coro(): + task = self.new_task(loop, nested()) + await asyncio.sleep(0) + task.cancel() + done, pending = await asyncio.wait([task]) + task.exception() # search target + + task = self.new_task(loop, coro()) + try: + loop.run_until_complete(task) + except asyncio.CancelledError: + tb = traceback.format_exc() + self.assert_text_contains(tb, "await asyncio.sleep(10)") + # The intermediate await should also be included. + self.assert_text_contains(tb, + "task.exception() # search target") + else: + self.fail('CancelledError did not occur') + def test_stop_while_run_in_complete(self): def gen(): @@ -2391,15 +2479,14 @@ def cancelling_callback(_): def test_cancel_gather_2(self): cases = [ - ((), ('',)), - ((None,), ('',)), + ((), ()), + ((None,), ()), (('my message',), ('my message',)), # Non-string values should roundtrip. 
((5,), (5,)), ] for cancel_args, expected_args in cases: with self.subTest(cancel_args=cancel_args): - loop = asyncio.new_event_loop() self.addCleanup(loop.close) @@ -2417,15 +2504,20 @@ async def main(): qwe = self.new_task(loop, test()) await asyncio.sleep(0.2) qwe.cancel(*cancel_args) - try: - await qwe - except asyncio.CancelledError as exc: - self.assertEqual(exc.args, expected_args) - else: - self.fail('gather did not propagate the cancellation ' - 'request') - - loop.run_until_complete(main()) + await qwe + + try: + loop.run_until_complete(main()) + except asyncio.CancelledError as exc: + self.assertEqual(exc.args, ()) + exc_type, exc_args, depth = get_innermost_context(exc) + self.assertEqual((exc_type, exc_args), + (asyncio.CancelledError, expected_args)) + # The exact traceback seems to vary in CI. + self.assertIn(depth, (2, 3)) + else: + self.fail('gather did not propagate the cancellation ' + 'request') def test_exception_traceback(self): # See http://bugs.python.org/issue28843 diff --git a/Misc/NEWS.d/next/Library/2020-05-06-02-33-00.bpo-31033.aX12pw.rst b/Misc/NEWS.d/next/Library/2020-05-06-02-33-00.bpo-31033.aX12pw.rst new file mode 100644 index 0000000000000..b1831e5ff8f89 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-06-02-33-00.bpo-31033.aX12pw.rst @@ -0,0 +1,2 @@ +When a :class:`asyncio.Task` is cancelled, the exception traceback +now chains all the way back to where the task was first interrupted. 
diff --git a/Modules/_asynciomodule.c b/Modules/_asynciomodule.c index ff1b2b8b909c7..1b6a579682430 100644 --- a/Modules/_asynciomodule.c +++ b/Modules/_asynciomodule.c @@ -1,4 +1,5 @@ #include "Python.h" +#include "pycore_pyerrors.h" // _PyErr_ClearExcState() #include // offsetof() @@ -72,7 +73,8 @@ typedef enum { int prefix##_log_tb; \ int prefix##_blocking; \ PyObject *dict; \ - PyObject *prefix##_weakreflist; + PyObject *prefix##_weakreflist; \ + _PyErr_StackItem prefix##_cancelled_exc_state; typedef struct { FutureObj_HEAD(fut) @@ -482,6 +484,7 @@ future_init(FutureObj *fut, PyObject *loop) Py_CLEAR(fut->fut_exception); Py_CLEAR(fut->fut_source_tb); Py_CLEAR(fut->fut_cancel_msg); + _PyErr_ClearExcState(&fut->fut_cancelled_exc_state); fut->fut_state = STATE_PENDING; fut->fut_log_tb = 0; @@ -601,9 +604,7 @@ create_cancelled_error(PyObject *msg) { PyObject *exc; if (msg == NULL || msg == Py_None) { - msg = PyUnicode_FromString(""); - exc = PyObject_CallOneArg(asyncio_CancelledError, msg); - Py_DECREF(msg); + exc = PyObject_CallNoArgs(asyncio_CancelledError); } else { exc = PyObject_CallOneArg(asyncio_CancelledError, msg); } @@ -623,6 +624,7 @@ future_get_result(FutureObj *fut, PyObject **result) { if (fut->fut_state == STATE_CANCELLED) { set_cancelled_error(fut->fut_cancel_msg); + _PyErr_ChainStackItem(&fut->fut_cancelled_exc_state); return -1; } @@ -777,6 +779,7 @@ FutureObj_clear(FutureObj *fut) Py_CLEAR(fut->fut_exception); Py_CLEAR(fut->fut_source_tb); Py_CLEAR(fut->fut_cancel_msg); + _PyErr_ClearExcState(&fut->fut_cancelled_exc_state); Py_CLEAR(fut->dict); return 0; } @@ -793,6 +796,12 @@ FutureObj_traverse(FutureObj *fut, visitproc visit, void *arg) Py_VISIT(fut->fut_source_tb); Py_VISIT(fut->fut_cancel_msg); Py_VISIT(fut->dict); + + _PyErr_StackItem *exc_state = &fut->fut_cancelled_exc_state; + Py_VISIT(exc_state->exc_type); + Py_VISIT(exc_state->exc_value); + Py_VISIT(exc_state->exc_traceback); + return 0; } @@ -858,6 +867,7 @@ 
_asyncio_Future_exception_impl(FutureObj *self) if (self->fut_state == STATE_CANCELLED) { set_cancelled_error(self->fut_cancel_msg); + _PyErr_ChainStackItem(&self->fut_cancelled_exc_state); return NULL; } @@ -1335,6 +1345,29 @@ FutureObj_get_state(FutureObj *fut, void *Py_UNUSED(ignored)) return ret; } +/*[clinic input] +_asyncio.Future._make_cancelled_error + +Create the CancelledError to raise if the Future is cancelled. + +This should only be called once when handling a cancellation since +it erases the context exception value. +[clinic start generated code]*/ + +static PyObject * +_asyncio_Future__make_cancelled_error_impl(FutureObj *self) +/*[clinic end generated code: output=a5df276f6c1213de input=ac6effe4ba795ecc]*/ +{ + PyObject *exc = create_cancelled_error(self->fut_cancel_msg); + _PyErr_StackItem *exc_state = &self->fut_cancelled_exc_state; + /* Transfer ownership of exc_value from exc_state to exc since we are + done with it. */ + PyException_SetContext(exc, exc_state->exc_value); + exc_state->exc_value = NULL; + + return exc; +} + /*[clinic input] _asyncio.Future._repr_info [clinic start generated code]*/ @@ -1461,6 +1494,7 @@ static PyMethodDef FutureType_methods[] = { _ASYNCIO_FUTURE_CANCELLED_METHODDEF _ASYNCIO_FUTURE_DONE_METHODDEF _ASYNCIO_FUTURE_GET_LOOP_METHODDEF + _ASYNCIO_FUTURE__MAKE_CANCELLED_ERROR_METHODDEF _ASYNCIO_FUTURE__REPR_INFO_METHODDEF {"__class_getitem__", future_cls_getitem, METH_O|METH_CLASS, NULL}, {NULL, NULL} /* Sentinel */ @@ -2232,6 +2266,24 @@ _asyncio_Task_all_tasks_impl(PyTypeObject *type, PyObject *loop) return res; } +/*[clinic input] +_asyncio.Task._make_cancelled_error + +Create the CancelledError to raise if the Task is cancelled. + +This should only be called once when handling a cancellation since +it erases the context exception value. 
+[clinic start generated code]*/ + +static PyObject * +_asyncio_Task__make_cancelled_error_impl(TaskObj *self) +/*[clinic end generated code: output=55a819e8b4276fab input=52c0e32de8e2f840]*/ +{ + FutureObj *fut = (FutureObj*)self; + return _asyncio_Future__make_cancelled_error_impl(fut); +} + + /*[clinic input] _asyncio.Task._repr_info [clinic start generated code]*/ @@ -2539,6 +2591,7 @@ static PyMethodDef TaskType_methods[] = { _ASYNCIO_TASK_CANCEL_METHODDEF _ASYNCIO_TASK_GET_STACK_METHODDEF _ASYNCIO_TASK_PRINT_STACK_METHODDEF + _ASYNCIO_TASK__MAKE_CANCELLED_ERROR_METHODDEF _ASYNCIO_TASK__REPR_INFO_METHODDEF _ASYNCIO_TASK_GET_NAME_METHODDEF _ASYNCIO_TASK_SET_NAME_METHODDEF @@ -2754,24 +2807,13 @@ task_step_impl(TaskObj *task, PyObject *exc) /* CancelledError */ PyErr_Fetch(&et, &ev, &tb); - PyObject *cancel_msg; - if (ev != NULL && PyExceptionInstance_Check(ev)) { - PyObject *exc_args = ((PyBaseExceptionObject*)ev)->args; - Py_ssize_t size = PyTuple_GET_SIZE(exc_args); - if (size > 0) { - cancel_msg = PyTuple_GET_ITEM(exc_args, 0); - } else { - cancel_msg = NULL; - } - } else { - cancel_msg = ev; - } - - Py_DECREF(et); - Py_XDECREF(tb); - Py_XDECREF(ev); + FutureObj *fut = (FutureObj*)task; + _PyErr_StackItem *exc_state = &fut->fut_cancelled_exc_state; + exc_state->exc_type = et; + exc_state->exc_value = ev; + exc_state->exc_traceback = tb; - return future_cancel((FutureObj*)task, cancel_msg); + return future_cancel(fut, NULL); } /* Some other exception; pop it and call Task.set_exception() */ diff --git a/Modules/clinic/_asynciomodule.c.h b/Modules/clinic/_asynciomodule.c.h index 3f5023c33a580..d3e59a4bc7822 100644 --- a/Modules/clinic/_asynciomodule.c.h +++ b/Modules/clinic/_asynciomodule.c.h @@ -271,6 +271,27 @@ _asyncio_Future_get_loop(FutureObj *self, PyObject *Py_UNUSED(ignored)) return _asyncio_Future_get_loop_impl(self); } +PyDoc_STRVAR(_asyncio_Future__make_cancelled_error__doc__, +"_make_cancelled_error($self, /)\n" +"--\n" +"\n" +"Create the 
CancelledError to raise if the Future is cancelled.\n" +"\n" +"This should only be called once when handling a cancellation since\n" +"it erases the context exception value."); + +#define _ASYNCIO_FUTURE__MAKE_CANCELLED_ERROR_METHODDEF \ + {"_make_cancelled_error", (PyCFunction)_asyncio_Future__make_cancelled_error, METH_NOARGS, _asyncio_Future__make_cancelled_error__doc__}, + +static PyObject * +_asyncio_Future__make_cancelled_error_impl(FutureObj *self); + +static PyObject * +_asyncio_Future__make_cancelled_error(FutureObj *self, PyObject *Py_UNUSED(ignored)) +{ + return _asyncio_Future__make_cancelled_error_impl(self); +} + PyDoc_STRVAR(_asyncio_Future__repr_info__doc__, "_repr_info($self, /)\n" "--\n" @@ -414,6 +435,27 @@ _asyncio_Task_all_tasks(PyTypeObject *type, PyObject *const *args, Py_ssize_t na return return_value; } +PyDoc_STRVAR(_asyncio_Task__make_cancelled_error__doc__, +"_make_cancelled_error($self, /)\n" +"--\n" +"\n" +"Create the CancelledError to raise if the Task is cancelled.\n" +"\n" +"This should only be called once when handling a cancellation since\n" +"it erases the context exception value."); + +#define _ASYNCIO_TASK__MAKE_CANCELLED_ERROR_METHODDEF \ + {"_make_cancelled_error", (PyCFunction)_asyncio_Task__make_cancelled_error, METH_NOARGS, _asyncio_Task__make_cancelled_error__doc__}, + +static PyObject * +_asyncio_Task__make_cancelled_error_impl(TaskObj *self); + +static PyObject * +_asyncio_Task__make_cancelled_error(TaskObj *self, PyObject *Py_UNUSED(ignored)) +{ + return _asyncio_Task__make_cancelled_error_impl(self); +} + PyDoc_STRVAR(_asyncio_Task__repr_info__doc__, "_repr_info($self, /)\n" "--\n" @@ -870,4 +912,4 @@ _asyncio__leave_task(PyObject *module, PyObject *const *args, Py_ssize_t nargs, exit: return return_value; } -/*[clinic end generated code: output=6ed4cfda8fc516ad input=a9049054013a1b77]*/ +/*[clinic end generated code: output=0e5c1eb8b692977b input=a9049054013a1b77]*/ diff --git a/Objects/genobject.c 
b/Objects/genobject.c index fb01e581f8ae1..40179cdbf7dbd 100644 --- a/Objects/genobject.c +++ b/Objects/genobject.c @@ -3,6 +3,7 @@ #include "Python.h" #include "pycore_ceval.h" // _PyEval_EvalFrame() #include "pycore_object.h" +#include "pycore_pyerrors.h" // _PyErr_ClearExcState() #include "pycore_pystate.h" // _PyThreadState_GET() #include "frameobject.h" #include "structmember.h" // PyMemberDef @@ -99,21 +100,6 @@ _PyGen_Finalize(PyObject *self) PyErr_Restore(error_type, error_value, error_traceback); } -static inline void -exc_state_clear(_PyErr_StackItem *exc_state) -{ - PyObject *t, *v, *tb; - t = exc_state->exc_type; - v = exc_state->exc_value; - tb = exc_state->exc_traceback; - exc_state->exc_type = NULL; - exc_state->exc_value = NULL; - exc_state->exc_traceback = NULL; - Py_XDECREF(t); - Py_XDECREF(v); - Py_XDECREF(tb); -} - static void gen_dealloc(PyGenObject *gen) { @@ -146,7 +132,7 @@ gen_dealloc(PyGenObject *gen) Py_CLEAR(gen->gi_code); Py_CLEAR(gen->gi_name); Py_CLEAR(gen->gi_qualname); - exc_state_clear(&gen->gi_exc_state); + _PyErr_ClearExcState(&gen->gi_exc_state); PyObject_GC_Del(gen); } @@ -286,7 +272,7 @@ gen_send_ex(PyGenObject *gen, PyObject *arg, int exc, int closing) if (!result || f->f_stacktop == NULL) { /* generator can't be rerun, so release the frame */ /* first clean reference cycle through stored exception traceback */ - exc_state_clear(&gen->gi_exc_state); + _PyErr_ClearExcState(&gen->gi_exc_state); gen->gi_frame->f_gen = NULL; gen->gi_frame = NULL; Py_DECREF(f); diff --git a/Python/errors.c b/Python/errors.c index f856a798eed1e..3b42c1120b8d0 100644 --- a/Python/errors.c +++ b/Python/errors.c @@ -512,6 +512,20 @@ _PyErr_ChainExceptions(PyObject *exc, PyObject *val, PyObject *tb) } } +void +_PyErr_ChainStackItem(_PyErr_StackItem *exc_state) +{ + if (exc_state->exc_type == NULL || exc_state->exc_type == Py_None) { + return; + } + Py_INCREF(exc_state->exc_type); + Py_XINCREF(exc_state->exc_value); + 
Py_XINCREF(exc_state->exc_traceback); + _PyErr_ChainExceptions(exc_state->exc_type, + exc_state->exc_value, + exc_state->exc_traceback); +} + static PyObject * _PyErr_FormatVFromCause(PyThreadState *tstate, PyObject *exception, const char *format, va_list vargs) diff --git a/setup.py b/setup.py index 0f92a9c010810..847cf2641fad8 100644 --- a/setup.py +++ b/setup.py @@ -853,7 +853,8 @@ def detect_simple_extensions(self): # _opcode module self.add(Extension('_opcode', ['_opcode.c'])) # asyncio speedups - self.add(Extension("_asyncio", ["_asynciomodule.c"])) + self.add(Extension("_asyncio", ["_asynciomodule.c"], + extra_compile_args=['-DPy_BUILD_CORE_MODULE'])) # _abc speedups self.add(Extension("_abc", ["_abc.c"])) # _queue module From webhook-mailer at python.org Mon May 18 02:21:42 2020 From: webhook-mailer at python.org (Kjell Braden) Date: Mon, 18 May 2020 06:21:42 -0000 Subject: [Python-checkins] bpo-39148: enable ipv6 for datagrams in Proactor (GH-19121) Message-ID: https://github.com/python/cpython/commit/442634c42fcaf31c636f693951a97734042c3e7b commit: 442634c42fcaf31c636f693951a97734042c3e7b branch: master author: Kjell Braden committer: GitHub date: 2020-05-17T23:21:30-07:00 summary: bpo-39148: enable ipv6 for datagrams in Proactor (GH-19121) Ifdef is not necessary, as AF_INET6 is supported from Windows Vista, and other code in overlapped.c uses AF_INET6 and is not ifdef'd. Change the raised exception so users are not fooled to think it comes from Windows API. 
Automerge-Triggered-By: @njsmith files: A Misc/NEWS.d/next/Windows/2020-03-23-19-07-55.bpo-39148.W1YJEb.rst M Lib/test/test_asyncio/test_events.py M Modules/overlapped.c diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py index aa4daca0e4e03..573df2d765887 100644 --- a/Lib/test/test_asyncio/test_events.py +++ b/Lib/test/test_asyncio/test_events.py @@ -1208,7 +1208,7 @@ def test_server_close(self): ConnectionRefusedError, client.connect, ('127.0.0.1', port)) client.close() - def test_create_datagram_endpoint(self): + def _test_create_datagram_endpoint(self, local_addr, family): class TestMyDatagramProto(MyDatagramProto): def __init__(inner_self): super().__init__(loop=self.loop) @@ -1218,9 +1218,11 @@ def datagram_received(self, data, addr): self.transport.sendto(b'resp:'+data, addr) coro = self.loop.create_datagram_endpoint( - TestMyDatagramProto, local_addr=('127.0.0.1', 0)) + TestMyDatagramProto, local_addr=local_addr, family=family) s_transport, server = self.loop.run_until_complete(coro) - host, port = s_transport.get_extra_info('sockname') + sockname = s_transport.get_extra_info('sockname') + host, port = socket.getnameinfo( + sockname, socket.NI_NUMERICHOST|socket.NI_NUMERICSERV) self.assertIsInstance(s_transport, asyncio.Transport) self.assertIsInstance(server, TestMyDatagramProto) @@ -1254,6 +1256,13 @@ def datagram_received(self, data, addr): self.assertEqual('CLOSED', client.state) server.transport.close() + def test_create_datagram_endpoint(self): + self._test_create_datagram_endpoint(('127.0.0.1', 0), socket.AF_INET) + + @unittest.skipUnless(support.IPV6_ENABLED, 'IPv6 not supported or enabled') + def test_create_datagram_endpoint_ipv6(self): + self._test_create_datagram_endpoint(('::1', 0), socket.AF_INET6) + def test_create_datagram_endpoint_sock(self): sock = None local_address = ('127.0.0.1', 0) diff --git a/Misc/NEWS.d/next/Windows/2020-03-23-19-07-55.bpo-39148.W1YJEb.rst 
b/Misc/NEWS.d/next/Windows/2020-03-23-19-07-55.bpo-39148.W1YJEb.rst new file mode 100644 index 0000000000000..7c70dce1e7333 --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2020-03-23-19-07-55.bpo-39148.W1YJEb.rst @@ -0,0 +1,3 @@ +Add IPv6 support to :mod:`asyncio` datagram endpoints in ProactorEventLoop. +Change the raised exception for unknown address families to ValueError +as it's not coming from Windows API. diff --git a/Modules/overlapped.c b/Modules/overlapped.c index a16797c47b5d6..df6282cba819b 100644 --- a/Modules/overlapped.c +++ b/Modules/overlapped.c @@ -670,7 +670,6 @@ make_ipv4_addr(const struct sockaddr_in *addr) return PyUnicode_FromString(buf); } -#ifdef ENABLE_IPV6 /* Convert IPv6 sockaddr to a Python str. */ static PyObject * @@ -683,7 +682,6 @@ make_ipv6_addr(const struct sockaddr_in6 *addr) } return PyUnicode_FromString(buf); } -#endif static PyObject* unparse_address(LPSOCKADDR Address, DWORD Length) @@ -701,7 +699,6 @@ unparse_address(LPSOCKADDR Address, DWORD Length) } return ret; } -#ifdef ENABLE_IPV6 case AF_INET6: { const struct sockaddr_in6 *a = (const struct sockaddr_in6 *)Address; PyObject *addrobj = make_ipv6_addr(a); @@ -716,9 +713,9 @@ unparse_address(LPSOCKADDR Address, DWORD Length) } return ret; } -#endif /* ENABLE_IPV6 */ default: { - return SetFromWindowsErr(ERROR_INVALID_PARAMETER); + PyErr_SetString(PyExc_ValueError, "recvfrom returned unsupported address family"); + return NULL; } } } From webhook-mailer at python.org Mon May 18 02:42:34 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 18 May 2020 06:42:34 -0000 Subject: [Python-checkins] bpo-39148: enable ipv6 for datagrams in Proactor (GH-19121) Message-ID: https://github.com/python/cpython/commit/94d9c5e5afdee4f46be9d9faaa39d6be40c34849 commit: 94d9c5e5afdee4f46be9d9faaa39d6be40c34849 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-17T23:42:25-07:00 summary: bpo-39148: 
enable ipv6 for datagrams in Proactor (GH-19121) Ifdef is not necessary, as AF_INET6 is supported from Windows Vista, and other code in overlapped.c uses AF_INET6 and is not ifdef'd. Change the raised exception so users are not fooled to think it comes from Windows API. Automerge-Triggered-By: @njsmith (cherry picked from commit 442634c42fcaf31c636f693951a97734042c3e7b) Co-authored-by: Kjell Braden files: A Misc/NEWS.d/next/Windows/2020-03-23-19-07-55.bpo-39148.W1YJEb.rst M Lib/test/test_asyncio/test_events.py M Modules/overlapped.c diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py index aec56da8e2250..37f1cb7e53a8d 100644 --- a/Lib/test/test_asyncio/test_events.py +++ b/Lib/test/test_asyncio/test_events.py @@ -1204,7 +1204,7 @@ def test_server_close(self): ConnectionRefusedError, client.connect, ('127.0.0.1', port)) client.close() - def test_create_datagram_endpoint(self): + def _test_create_datagram_endpoint(self, local_addr, family): class TestMyDatagramProto(MyDatagramProto): def __init__(inner_self): super().__init__(loop=self.loop) @@ -1214,9 +1214,11 @@ def datagram_received(self, data, addr): self.transport.sendto(b'resp:'+data, addr) coro = self.loop.create_datagram_endpoint( - TestMyDatagramProto, local_addr=('127.0.0.1', 0)) + TestMyDatagramProto, local_addr=local_addr, family=family) s_transport, server = self.loop.run_until_complete(coro) - host, port = s_transport.get_extra_info('sockname') + sockname = s_transport.get_extra_info('sockname') + host, port = socket.getnameinfo( + sockname, socket.NI_NUMERICHOST|socket.NI_NUMERICSERV) self.assertIsInstance(s_transport, asyncio.Transport) self.assertIsInstance(server, TestMyDatagramProto) @@ -1250,6 +1252,13 @@ def datagram_received(self, data, addr): self.assertEqual('CLOSED', client.state) server.transport.close() + def test_create_datagram_endpoint(self): + self._test_create_datagram_endpoint(('127.0.0.1', 0), socket.AF_INET) + + 
@unittest.skipUnless(support.IPV6_ENABLED, 'IPv6 not supported or enabled') + def test_create_datagram_endpoint_ipv6(self): + self._test_create_datagram_endpoint(('::1', 0), socket.AF_INET6) + def test_create_datagram_endpoint_sock(self): sock = None local_address = ('127.0.0.1', 0) diff --git a/Misc/NEWS.d/next/Windows/2020-03-23-19-07-55.bpo-39148.W1YJEb.rst b/Misc/NEWS.d/next/Windows/2020-03-23-19-07-55.bpo-39148.W1YJEb.rst new file mode 100644 index 0000000000000..7c70dce1e7333 --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2020-03-23-19-07-55.bpo-39148.W1YJEb.rst @@ -0,0 +1,3 @@ +Add IPv6 support to :mod:`asyncio` datagram endpoints in ProactorEventLoop. +Change the raised exception for unknown address families to ValueError +as it's not coming from Windows API. diff --git a/Modules/overlapped.c b/Modules/overlapped.c index 52ed0bc284bcc..b35f708f95bc2 100644 --- a/Modules/overlapped.c +++ b/Modules/overlapped.c @@ -670,7 +670,6 @@ make_ipv4_addr(const struct sockaddr_in *addr) return PyUnicode_FromString(buf); } -#ifdef ENABLE_IPV6 /* Convert IPv6 sockaddr to a Python str. */ static PyObject * @@ -683,7 +682,6 @@ make_ipv6_addr(const struct sockaddr_in6 *addr) } return PyUnicode_FromString(buf); } -#endif static PyObject* unparse_address(LPSOCKADDR Address, DWORD Length) @@ -701,7 +699,6 @@ unparse_address(LPSOCKADDR Address, DWORD Length) } return ret; } -#ifdef ENABLE_IPV6 case AF_INET6: { const struct sockaddr_in6 *a = (const struct sockaddr_in6 *)Address; PyObject *addrobj = make_ipv6_addr(a); @@ -716,9 +713,9 @@ unparse_address(LPSOCKADDR Address, DWORD Length) } return ret; } -#endif /* ENABLE_IPV6 */ default: { - return SetFromWindowsErr(ERROR_INVALID_PARAMETER); + PyErr_SetString(PyExc_ValueError, "recvfrom returned unsupported address family"); + return NULL; } } } From webhook-mailer at python.org Mon May 18 03:56:55 2020 From: webhook-mailer at python.org (Nathaniel J. 
Smith) Date: Mon, 18 May 2020 07:56:55 -0000 Subject: [Python-checkins] bpo-39148: fixup to account for IPV6_ENABLED being moved (GH-20170) Message-ID: https://github.com/python/cpython/commit/58205a0217e91232cc1e945dbfe4e387d636eb76 commit: 58205a0217e91232cc1e945dbfe4e387d636eb76 branch: master author: Nathaniel J. Smith committer: GitHub date: 2020-05-18T00:56:47-07:00 summary: bpo-39148: fixup to account for IPV6_ENABLED being moved (GH-20170) files: M Lib/test/test_asyncio/test_events.py diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py index 573df2d765887..0fb361884185e 100644 --- a/Lib/test/test_asyncio/test_events.py +++ b/Lib/test/test_asyncio/test_events.py @@ -1259,7 +1259,7 @@ def datagram_received(self, data, addr): def test_create_datagram_endpoint(self): self._test_create_datagram_endpoint(('127.0.0.1', 0), socket.AF_INET) - @unittest.skipUnless(support.IPV6_ENABLED, 'IPv6 not supported or enabled') + @unittest.skipUnless(socket_helper.IPV6_ENABLED, 'IPv6 not supported or enabled') def test_create_datagram_endpoint_ipv6(self): self._test_create_datagram_endpoint(('::1', 0), socket.AF_INET6) From webhook-mailer at python.org Mon May 18 04:33:04 2020 From: webhook-mailer at python.org (Ned Deily) Date: Mon, 18 May 2020 08:33:04 -0000 Subject: [Python-checkins] bpo-34956: Fix macOS _tkinter use of Tcl/Tk in /Library/Frameworks (GH-20171) Message-ID: https://github.com/python/cpython/commit/1731d6da263e9a2d6e783e87a8a5d5ce58fda46d commit: 1731d6da263e9a2d6e783e87a8a5d5ce58fda46d branch: master author: Ned Deily committer: GitHub date: 2020-05-18T04:32:38-04:00 summary: bpo-34956: Fix macOS _tkinter use of Tcl/Tk in /Library/Frameworks (GH-20171) _tkinter now builds and links with non-system Tcl and Tk frameworks if they are installed in /Library/Frameworks as had been the case on older releases of macOS. 
If a macOS SDK is explicitly configured, by using ./configure --enable-universalsdk= or -isysroot, only a Library/Frameworks directory in the SDK itself is searched. The default behavior can still be overridden with configure --with-tcltk-includes and --with-tcltk-libs. files: A Misc/NEWS.d/next/macOS/2020-05-18-02-43-11.bpo-34956.35IcGF.rst M setup.py diff --git a/Misc/NEWS.d/next/macOS/2020-05-18-02-43-11.bpo-34956.35IcGF.rst b/Misc/NEWS.d/next/macOS/2020-05-18-02-43-11.bpo-34956.35IcGF.rst new file mode 100644 index 0000000000000..6ad9c1ac93355 --- /dev/null +++ b/Misc/NEWS.d/next/macOS/2020-05-18-02-43-11.bpo-34956.35IcGF.rst @@ -0,0 +1,6 @@ +_tkinter now builds and links with non-system Tcl and Tk frameworks if they +are installed in /Library/Frameworks as had been the case on older releases +of macOS. If a macOS SDK is explicitly configured, by using ./configure +--enable-universalsdk= or -isysroot, only a Library/Frameworks directory in +the SDK itself is searched. The default behavior can still be overridden with +configure --with-tcltk-includes and --with-tcltk-libs. diff --git a/setup.py b/setup.py index 847cf2641fad8..a220f366e2509 100644 --- a/setup.py +++ b/setup.py @@ -150,7 +150,9 @@ def sysroot_paths(make_vars, subdirs): break return dirs + MACOS_SDK_ROOT = None +MACOS_SDK_SPECIFIED = None def macosx_sdk_root(): """Return the directory of the current macOS SDK. @@ -162,8 +164,9 @@ def macosx_sdk_root(): (The SDK may be supplied via Xcode or via the Command Line Tools). The SDK paths used by Apple-supplied tool chains depend on the setting of various variables; see the xcrun man page for more info. + Also sets MACOS_SDK_SPECIFIED for use by macosx_sdk_specified(). """ - global MACOS_SDK_ROOT + global MACOS_SDK_ROOT, MACOS_SDK_SPECIFIED # If already called, return cached result. 
if MACOS_SDK_ROOT: @@ -173,8 +176,10 @@ def macosx_sdk_root(): m = re.search(r'-isysroot\s*(\S+)', cflags) if m is not None: MACOS_SDK_ROOT = m.group(1) + MACOS_SDK_SPECIFIED = MACOS_SDK_ROOT != '/' else: MACOS_SDK_ROOT = '/' + MACOS_SDK_SPECIFIED = False cc = sysconfig.get_config_var('CC') tmpfile = '/tmp/setup_sdk_root.%d' % os.getpid() try: @@ -203,6 +208,28 @@ def macosx_sdk_root(): return MACOS_SDK_ROOT +def macosx_sdk_specified(): + """Returns true if an SDK was explicitly configured. + + True if an SDK was selected at configure time, either by specifying + --enable-universalsdk=(something other than no or /) or by adding a + -isysroot option to CFLAGS. In some cases, like when making + decisions about macOS Tk framework paths, we need to be able to + know whether the user explicitly asked to build with an SDK versus + the implicit use of an SDK when header files are no longer + installed on a running system by the Command Line Tools. + """ + global MACOS_SDK_SPECIFIED + + # If already called, return cached result. + if MACOS_SDK_SPECIFIED: + return MACOS_SDK_SPECIFIED + + # Find the sdk root and set MACOS_SDK_SPECIFIED + macosx_sdk_root() + return MACOS_SDK_SPECIFIED + + def is_macosx_sdk_path(path): """ Returns True if 'path' can be located in an OSX SDK @@ -1830,31 +1857,73 @@ def detect_tkinter_explicitly(self): return True def detect_tkinter_darwin(self): - # The _tkinter module, using frameworks. Since frameworks are quite - # different the UNIX search logic is not sharable. + # Build default _tkinter on macOS using Tcl and Tk frameworks. + # + # The macOS native Tk (AKA Aqua Tk) and Tcl are most commonly + # built and installed as macOS framework bundles. However, + # for several reasons, we cannot take full advantage of the + # Apple-supplied compiler chain's -framework options here. 
+ # Instead, we need to find and pass to the compiler the + # absolute paths of the Tcl and Tk header files we want to use + # and the absolute path to the directory containing the Tcl + # and Tk frameworks for linking. + # + # We want to handle here two common use cases on macOS: + # 1. Build and link with system-wide third-party or user-built + # Tcl and Tk frameworks installed in /Library/Frameworks. + # 2. Build and link using a user-specified macOS SDK so that the + # built Python can be exported to other systems. In this case, + # search only the SDK's /Library/Frameworks (normally empty) + # and /System/Library/Frameworks. + # + # Any other use case should be able to be handled explicitly by + # using the options described above in detect_tkinter_explicitly(). + # In particular it would be good to handle here the case where + # you want to build and link with a framework build of Tcl and Tk + # that is not in /Library/Frameworks, say, in your private + # $HOME/Library/Frameworks directory or elsewhere. It turns + # out to be difficult to make that work automatically here + # without bringing into play more tools and magic. That case + # can be handled using a recipe with the right arguments + # to detect_tkinter_explicitly(). + # + # Note also that the fallback case here is to try to use the + # Apple-supplied Tcl and Tk frameworks in /System/Library but + # be forewarned that they are deprecated by Apple and typically + # out-of-date and buggy; their use should be avoided if at + # all possible by installing a newer version of Tcl and Tk in + # /Library/Frameworks before building Python without + # an explicit SDK or by configuring build arguments explicitly. 
+ from os.path import join, exists - framework_dirs = [ - '/Library/Frameworks', - '/System/Library/Frameworks/', - join(os.getenv('HOME'), '/Library/Frameworks') - ] - sysroot = macosx_sdk_root() + sysroot = macosx_sdk_root() # path to the SDK or '/' - # Find the directory that contains the Tcl.framework and Tk.framework - # bundles. - # XXX distutils should support -F! + if macosx_sdk_specified(): + # Use case #2: an SDK other than '/' was specified. + # Only search there. + framework_dirs = [ + join(sysroot, 'Library', 'Frameworks'), + join(sysroot, 'System', 'Library', 'Frameworks'), + ] + else: + # Use case #1: no explicit SDK selected. + # Search the local system-wide /Library/Frameworks, + # not the one in the default SDK, otherwise fall back to + # /System/Library/Frameworks whose header files may be in + # the default SDK or, on older systems, actually installed. + framework_dirs = [ + join('/', 'Library', 'Frameworks'), + join(sysroot, 'System', 'Library', 'Frameworks'), + ] + + # Find the directory that contains the Tcl.framework and + # Tk.framework bundles. for F in framework_dirs: # both Tcl.framework and Tk.framework should be present - - for fw in 'Tcl', 'Tk': - if is_macosx_sdk_path(F): - if not exists(join(sysroot, F[1:], fw + '.framework')): - break - else: - if not exists(join(F, fw + '.framework')): - break + if not exists(join(F, fw + '.framework')): + break else: # ok, F is now directory with both frameworks. Continue # building @@ -1864,24 +1933,16 @@ def detect_tkinter_darwin(self): # will now resume. return False - # For 8.4a2, we must add -I options that point inside the Tcl and Tk - # frameworks. In later release we should hopefully be able to pass - # the -F option to gcc, which specifies a framework lookup path. - include_dirs = [ join(F, fw + '.framework', H) for fw in ('Tcl', 'Tk') - for H in ('Headers', 'Versions/Current/PrivateHeaders') + for H in ('Headers',) ] - # For 8.4a2, the X11 headers are not included. 
Rather than include a - # complicated search, this is a hard-coded path. It could bail out - # if X11 libs are not found... - include_dirs.append('/usr/X11R6/include') - frameworks = ['-framework', 'Tcl', '-framework', 'Tk'] + # Add the base framework directory as well + compile_args = ['-F', F] - # All existing framework builds of Tcl/Tk don't support 64-bit - # architectures. + # Do not build tkinter for archs that this Tk was not built with. cflags = sysconfig.get_config_vars('CFLAGS')[0] archs = re.findall(r'-arch\s+(\w+)', cflags) @@ -1889,13 +1950,9 @@ def detect_tkinter_darwin(self): if not os.path.exists(self.build_temp): os.makedirs(self.build_temp) - # Note: cannot use os.popen or subprocess here, that - # requires extensions that are not available here. - if is_macosx_sdk_path(F): - run_command("file %s/Tk.framework/Tk | grep 'for architecture' > %s"%(os.path.join(sysroot, F[1:]), tmpfile)) - else: - run_command("file %s/Tk.framework/Tk | grep 'for architecture' > %s"%(F, tmpfile)) - + run_command( + "file {}/Tk.framework/Tk | grep 'for architecture' > {}".format(F, tmpfile) + ) with open(tmpfile) as fp: detected_archs = [] for ln in fp: @@ -1904,16 +1961,26 @@ def detect_tkinter_darwin(self): detected_archs.append(ln.split()[-1]) os.unlink(tmpfile) + arch_args = [] for a in detected_archs: - frameworks.append('-arch') - frameworks.append(a) + arch_args.append('-arch') + arch_args.append(a) + + compile_args += arch_args + link_args = [','.join(['-Wl', '-F', F, '-framework', 'Tcl', '-framework', 'Tk']), *arch_args] + + # The X11/xlib.h file bundled in the Tk sources can cause function + # prototype warnings from the compiler. Since we cannot easily fix + # that, suppress the warnings here instead. 
+ if '-Wstrict-prototypes' in cflags.split(): + compile_args.append('-Wno-strict-prototypes') self.add(Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], define_macros=[('WITH_APPINIT', 1)], include_dirs=include_dirs, libraries=[], - extra_compile_args=frameworks[2:], - extra_link_args=frameworks)) + extra_compile_args=compile_args, + extra_link_args=link_args)) return True def detect_tkinter(self): From webhook-mailer at python.org Mon May 18 09:17:26 2020 From: webhook-mailer at python.org (Ned Deily) Date: Mon, 18 May 2020 13:17:26 -0000 Subject: [Python-checkins] bpo-26317: Support OBJC and OBJCXX configure command line variables (GH-20176) Message-ID: https://github.com/python/cpython/commit/0da546665075aefbb476e192ed64122d340164f4 commit: 0da546665075aefbb476e192ed64122d340164f4 branch: master author: Ned Deily committer: GitHub date: 2020-05-18T09:17:22-04:00 summary: bpo-26317: Support OBJC and OBJCXX configure command line variables (GH-20176) Add support to the configure script for OBJC and OBJCXX command line options so that the macOS builds can use the clang compiler for the macOS-specific Objective C source files. This allows third-party compilers, like GNU gcc, to be used to build the rest of the project since some of the Objective C system header files are not compilable by GNU gcc. Co-authored-by: Jeffrey Kintscher Co-authored-by: Terry Jan Reedy files: A Misc/NEWS.d/next/Build/2019-05-14-05-35-14.bpo-26317.no8mw-.rst M Doc/whatsnew/3.9.rst M Mac/Makefile.in M Mac/PythonLauncher/Makefile.in M Makefile.pre.in M Misc/ACKS M aclocal.m4 M configure M configure.ac M setup.py diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index 66e7c6461ecd7..9e42e9db0ca35 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -892,6 +892,14 @@ Build Changes functions are now required to build Python. (Contributed by Victor Stinner in :issue:`39395`.) 
+* The ``OBJC`` and ``OBJCXX`` standard command line options for the configure + script are now supported. This allows building for macOS with a third-party + compiler, like GNU `gcc`, for everything except for the several + system-specific source files that need system headers that require the + Apple-supplied compile chain (e.g. the ``_scproxy`` helper module and + ``PythonLauncher.app``). + (Contributed by Jeffrey Kintscher in :issue:`26317`.) + C API Changes ============= diff --git a/Mac/Makefile.in b/Mac/Makefile.in index 0b32673323a0d..db59fc5d7f532 100644 --- a/Mac/Makefile.in +++ b/Mac/Makefile.in @@ -21,6 +21,7 @@ PYTHONFRAMEWORK=@PYTHONFRAMEWORK@ PYTHONFRAMEWORKIDENTIFIER=@PYTHONFRAMEWORKIDENTIFIER@ LIPO_32BIT_FLAGS=@LIPO_32BIT_FLAGS@ CC=@CC@ +OBJC=@OBJC@ MACOSX_DEPLOYMENT_TARGET=@CONFIGURE_MACOSX_DEPLOYMENT_TARGET@ export MACOSX_DEPLOYMENT_TARGET diff --git a/Mac/PythonLauncher/Makefile.in b/Mac/PythonLauncher/Makefile.in index 4c05f26e8358b..1553b75905157 100644 --- a/Mac/PythonLauncher/Makefile.in +++ b/Mac/PythonLauncher/Makefile.in @@ -1,5 +1,7 @@ CC=@CC@ LD=@CC@ +OBJC=@OBJC@ +OBJCFLAFS=@OBJCFLAGS@ BASECFLAGS=@BASECFLAGS@ OPT=@OPT@ CFLAGS=@CFLAGS@ $(BASECFLAGS) $(OPT) @@ -52,25 +54,25 @@ Python\ Launcher.app: Info.plist \ cp -R $(srcdir)/English.lproj "Python Launcher.app/Contents/Resources" FileSettings.o: $(srcdir)/FileSettings.m - $(CC) $(CFLAGS) -o $@ -c $(srcdir)/FileSettings.m + $(OBJC) $(CFLAGS) -o $@ -c $(srcdir)/FileSettings.m MyAppDelegate.o: $(srcdir)/MyAppDelegate.m - $(CC) $(CFLAGS) -o $@ -c $(srcdir)/MyAppDelegate.m + $(OBJC) $(CFLAGS) -o $@ -c $(srcdir)/MyAppDelegate.m MyDocument.o: $(srcdir)/MyDocument.m - $(CC) $(CFLAGS) -o $@ -c $(srcdir)/MyDocument.m + $(OBJC) $(CFLAGS) -o $@ -c $(srcdir)/MyDocument.m PreferencesWindowController.o: $(srcdir)/PreferencesWindowController.m - $(CC) $(CFLAGS) -o $@ -c $(srcdir)/PreferencesWindowController.m + $(OBJC) $(CFLAGS) -o $@ -c $(srcdir)/PreferencesWindowController.m doscript.o: 
$(srcdir)/doscript.m - $(CC) $(CFLAGS) -o $@ -c $(srcdir)/doscript.m + $(OBJC) $(CFLAGS) -o $@ -c $(srcdir)/doscript.m main.o: $(srcdir)/main.m - $(CC) $(CFLAGS) -o $@ -c $(srcdir)/main.m + $(OBJC) $(CFLAGS) -o $@ -c $(srcdir)/main.m Python\ Launcher: $(OBJECTS) - $(CC) $(LDFLAGS) -o "Python Launcher" $(OBJECTS) -framework AppKit -framework Carbon + $(OBJC) $(LDFLAGS) -o "Python Launcher" $(OBJECTS) -framework AppKit -framework Carbon Info.plist: $(srcdir)/Info.plist.in sed 's/%VERSION%/'"`$(RUNSHARED) $(BUILDPYTHON) -c 'import platform; print(platform.python_version())'`"'/g' < $(srcdir)/Info.plist.in > Info.plist diff --git a/Makefile.pre.in b/Makefile.pre.in index dbfd805f1a02f..339cbfd56cf8e 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -35,6 +35,7 @@ abs_builddir= @abs_builddir@ CC= @CC@ CXX= @CXX@ +OBJC= @OBJC@ MAINCC= @MAINCC@ LINKCC= @LINKCC@ AR= @AR@ @@ -613,12 +614,22 @@ $(srcdir)/Modules/_blake2/blake2s_impl.c: $(srcdir)/Modules/_blake2/blake2b_impl $(PYTHON_FOR_REGEN) $(srcdir)/Modules/_blake2/blake2b2s.py $(PYTHON_FOR_REGEN) $(srcdir)/Tools/clinic/clinic.py -f $@ +# _scproxy.o needs to be built outside of setup.py so that we can force +# the use of the OBJC compiler when the CC compiler is different. For +# example, it allows _scproxy.c to be compiled using the clang compiler +# while the rest of the project uses the GNU C compiler. +# +# see issue #26317 for details + at SCPROXY@Modules/_scproxy.o: $(srcdir)/Modules/_scproxy.c $(srcdir)/Include/Python.h + at SCPROXY@ $(OBJC) -c $(CCSHARED) $(PY_CORE_CFLAGS) -o $@ $< + at NOSCPROXY@.PHONY: Modules/_scproxy.o + # Build the shared modules # Under GNU make, MAKEFLAGS are sorted and normalized; the 's' for # -s, --silent or --quiet is always the first char. # Under BSD make, MAKEFLAGS might be " -s -v x=y". 
# Ignore macros passed by GNU make, passed after -- -sharedmods: $(BUILDPYTHON) pybuilddir.txt Modules/_math.o +sharedmods: $(BUILDPYTHON) pybuilddir.txt Modules/_math.o Modules/_scproxy.o @case "`echo X $$MAKEFLAGS | sed 's/^X //;s/ -- .*//'`" in \ *\ -s*|s*) quiet="-q";; \ *) quiet="";; \ diff --git a/Misc/ACKS b/Misc/ACKS index 6511383fa25d7..86b687b825ff2 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -882,6 +882,7 @@ Sam Kimbrel Tomohiko Kinebuchi James King W. Trevor King +Jeffrey Kintscher Paul Kippes Steve Kirsch Sebastian Kirsche diff --git a/Misc/NEWS.d/next/Build/2019-05-14-05-35-14.bpo-26317.no8mw-.rst b/Misc/NEWS.d/next/Build/2019-05-14-05-35-14.bpo-26317.no8mw-.rst new file mode 100644 index 0000000000000..4072a4a7cd2b5 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2019-05-14-05-35-14.bpo-26317.no8mw-.rst @@ -0,0 +1 @@ +The OBJC and OBJCXX standard command line options for the configure script are now supported. This allows building fot macOS with a third-party compiler, like GNU gcc, for everything except for the several system-specific source files that need system headers that require the Apple-supplied compile chain (e.g. the _scproxy helper module and PythonLauncher.app). Patch by Jeffrey Kintscher. diff --git a/aclocal.m4 b/aclocal.m4 index b5f9cb0e8da44..3b865c34602a5 100644 --- a/aclocal.m4 +++ b/aclocal.m4 @@ -55,7 +55,7 @@ dnl dnl See the "Since" comment for each macro you use to see what version dnl of the macros you require. 
m4_defun([PKG_PREREQ], -[m4_define([PKG_MACROS_VERSION], [0.29.1]) +[m4_define([PKG_MACROS_VERSION], [0.29.2]) m4_if(m4_version_compare(PKG_MACROS_VERSION, [$1]), -1, [m4_fatal([pkg.m4 version $1 or higher is required but ]PKG_MACROS_VERSION[ found])]) ])dnl PKG_PREREQ @@ -156,7 +156,7 @@ AC_ARG_VAR([$1][_CFLAGS], [C compiler flags for $1, overriding pkg-config])dnl AC_ARG_VAR([$1][_LIBS], [linker flags for $1, overriding pkg-config])dnl pkg_failed=no -AC_MSG_CHECKING([for $1]) +AC_MSG_CHECKING([for $2]) _PKG_CONFIG([$1][_CFLAGS], [cflags], [$2]) _PKG_CONFIG([$1][_LIBS], [libs], [$2]) @@ -166,11 +166,11 @@ and $1[]_LIBS to avoid the need to call pkg-config. See the pkg-config man page for more details.]) if test $pkg_failed = yes; then - AC_MSG_RESULT([no]) + AC_MSG_RESULT([no]) _PKG_SHORT_ERRORS_SUPPORTED if test $_pkg_short_errors_supported = yes; then $1[]_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$2" 2>&1` - else + else $1[]_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$2" 2>&1` fi # Put the nasty error message in config.log where it belongs @@ -187,7 +187,7 @@ installed software in a non-standard prefix. _PKG_TEXT])[]dnl ]) elif test $pkg_failed = untried; then - AC_MSG_RESULT([no]) + AC_MSG_RESULT([no]) m4_default([$4], [AC_MSG_FAILURE( [The pkg-config script could not be found or is too old. Make sure it is in your PATH or set the PKG_CONFIG environment variable to the full diff --git a/configure b/configure index 56d66d0235289..4242d5c60ff56 100755 --- a/configure +++ b/configure @@ -716,6 +716,14 @@ MULTIARCH_CPPFLAGS PLATFORM_TRIPLET MULTIARCH ac_ct_CXX +NOSCPROXY +SCPROXY +ac_ct_OBJCXX +OBJCXXFLAGS +OBJCXX +ac_ct_OBJC +OBJCFLAGS +OBJC MAINCC CXX SED @@ -860,6 +868,10 @@ LDFLAGS LIBS CPPFLAGS CPP +OBJC +OBJCFLAGS +OBJCXX +OBJCXXFLAGS PROFILE_TASK PKG_CONFIG PKG_CONFIG_PATH @@ -1599,6 +1611,10 @@ Some influential environment variables: CPPFLAGS (Objective) C/C++ preprocessor flags, e.g. 
-I if you have headers in a nonstandard directory CPP C preprocessor + OBJC Objective C compiler command + OBJCFLAGS Objective C compiler flags + OBJCXX Objective C++ compiler command + OBJCXXFLAGS Objective C++ compiler flags PROFILE_TASK Python args for PGO generation task PKG_CONFIG path to pkg-config utility @@ -1762,6 +1778,82 @@ fi } # ac_fn_c_try_cpp +# ac_fn_objc_try_compile LINENO +# ----------------------------- +# Try to compile conftest.$ac_ext, and return whether this succeeded. +ac_fn_objc_try_compile () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + rm -f conftest.$ac_objext + if { { ac_try="$ac_compile" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_compile") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + grep -v '^ *+' conftest.err >conftest.er1 + cat conftest.er1 >&5 + mv -f conftest.er1 conftest.err + fi + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && { + test -z "$ac_objc_werror_flag" || + test ! -s conftest.err + } && test -s conftest.$ac_objext; then : + ac_retval=0 +else + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=1 +fi + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + as_fn_set_status $ac_retval + +} # ac_fn_objc_try_compile + +# ac_fn_objcxx_try_compile LINENO +# ------------------------------- +# Try to compile conftest.$ac_ext, and return whether this succeeded. 
+ac_fn_objcxx_try_compile () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + rm -f conftest.$ac_objext + if { { ac_try="$ac_compile" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_compile") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + grep -v '^ *+' conftest.err >conftest.er1 + cat conftest.er1 >&5 + mv -f conftest.er1 conftest.err + fi + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && { + test -z "$ac_objcxx_werror_flag" || + test ! -s conftest.err + } && test -s conftest.$ac_objext; then : + ac_retval=0 +else + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=1 +fi + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + as_fn_set_status $ac_retval + +} # ac_fn_objcxx_try_compile + # ac_fn_c_try_link LINENO # ----------------------- # Try to link conftest.$ac_ext, and return whether this succeeded. @@ -4641,6 +4733,525 @@ $as_echo "$ac_cv_path_SED" >&6; } + +if test "$ac_sys_system" = "Darwin" +then + # MacOSX requires an Objective C compiler to + # build some Mac-specific code. + ac_ext=m +ac_cpp='$OBJCPP $CPPFLAGS' +ac_compile='$OBJC -c $OBJCFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$OBJC -o conftest$ac_exeext $OBJCFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_objc_compiler_gnu +if test -n "$ac_tool_prefix"; then + for ac_prog in gcc objcc objc cc CC + do + # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. +set dummy $ac_tool_prefix$ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... 
" >&6; } +if ${ac_cv_prog_OBJC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$OBJC"; then + ac_cv_prog_OBJC="$OBJC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_OBJC="$ac_tool_prefix$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +OBJC=$ac_cv_prog_OBJC +if test -n "$OBJC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OBJC" >&5 +$as_echo "$OBJC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$OBJC" && break + done +fi +if test -z "$OBJC"; then + ac_ct_OBJC=$OBJC + for ac_prog in gcc objcc objc cc CC +do + # Extract the first word of "$ac_prog", so it can be a program name with args. +set dummy $ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_OBJC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_OBJC"; then + ac_cv_prog_ac_ct_OBJC="$ac_ct_OBJC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_OBJC="$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_OBJC=$ac_cv_prog_ac_ct_OBJC +if test -n "$ac_ct_OBJC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OBJC" >&5 +$as_echo "$ac_ct_OBJC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$ac_ct_OBJC" && break +done + + if test "x$ac_ct_OBJC" = x; then + OBJC="gcc" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + OBJC=$ac_ct_OBJC + fi +fi + +# Provide some information about the compiler. +$as_echo "$as_me:${as_lineno-$LINENO}: checking for Objective C compiler version" >&5 +set X $ac_compile +ac_compiler=$2 +for ac_option in --version -v -V -qversion; do + { { ac_try="$ac_compiler $ac_option >&5" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_compiler $ac_option >&5") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + sed '10a\ +... rest of stderr output deleted ... + 10q' conftest.err >conftest.er1 + cat conftest.er1 >&5 + fi + rm -f conftest.er1 conftest.err + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } +done + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU Objective C compiler" >&5 +$as_echo_n "checking whether we are using the GNU Objective C compiler... 
" >&6; } +if ${ac_cv_objc_compiler_gnu+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ +#ifndef __GNUC__ + choke me +#endif + + ; + return 0; +} +_ACEOF +if ac_fn_objc_try_compile "$LINENO"; then : + ac_compiler_gnu=yes +else + ac_compiler_gnu=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +ac_cv_objc_compiler_gnu=$ac_compiler_gnu + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objc_compiler_gnu" >&5 +$as_echo "$ac_cv_objc_compiler_gnu" >&6; } +if test $ac_compiler_gnu = yes; then + GOBJC=yes +else + GOBJC= +fi +ac_test_OBJCFLAGS=${OBJCFLAGS+set} +ac_save_OBJCFLAGS=$OBJCFLAGS +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $OBJC accepts -g" >&5 +$as_echo_n "checking whether $OBJC accepts -g... " >&6; } +if ${ac_cv_prog_objc_g+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_save_objc_werror_flag=$ac_objc_werror_flag + ac_objc_werror_flag=yes + ac_cv_prog_objc_g=no + OBJCFLAGS="-g" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_objc_try_compile "$LINENO"; then : + ac_cv_prog_objc_g=yes +else + OBJCFLAGS="" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_objc_try_compile "$LINENO"; then : + +else + ac_objc_werror_flag=$ac_save_objc_werror_flag + OBJCFLAGS="-g" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_objc_try_compile "$LINENO"; then : + ac_cv_prog_objc_g=yes +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + ac_objc_werror_flag=$ac_save_objc_werror_flag +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_objc_g" >&5 +$as_echo "$ac_cv_prog_objc_g" >&6; } +if test "$ac_test_OBJCFLAGS" = set; then + OBJCFLAGS=$ac_save_OBJCFLAGS +elif test $ac_cv_prog_objc_g = yes; then + if test "$GOBJC" = yes; then + OBJCFLAGS="-g -O2" + else + OBJCFLAGS="-g" + fi +else + if test "$GOBJC" = yes; then + OBJCFLAGS="-O2" + else + OBJCFLAGS= + fi +fi +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + ac_ext=mm +ac_cpp='$OBJCXXCPP $CPPFLAGS' +ac_compile='$OBJCXX -c $OBJCXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$OBJCXX -o conftest$ac_exeext $OBJCXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_objcxx_compiler_gnu +if test -n "$ac_tool_prefix"; then + for ac_prog in g++ objc++ objcxx c++ CXX + do + # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. +set dummy $ac_tool_prefix$ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_OBJCXX+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$OBJCXX"; then + ac_cv_prog_OBJCXX="$OBJCXX" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_OBJCXX="$ac_tool_prefix$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +OBJCXX=$ac_cv_prog_OBJCXX +if test -n "$OBJCXX"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OBJCXX" >&5 +$as_echo "$OBJCXX" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$OBJCXX" && break + done +fi +if test -z "$OBJCXX"; then + ac_ct_OBJCXX=$OBJCXX + for ac_prog in g++ objc++ objcxx c++ CXX +do + # Extract the first word of "$ac_prog", so it can be a program name with args. +set dummy $ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_OBJCXX+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_OBJCXX"; then + ac_cv_prog_ac_ct_OBJCXX="$ac_ct_OBJCXX" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_OBJCXX="$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_OBJCXX=$ac_cv_prog_ac_ct_OBJCXX +if test -n "$ac_ct_OBJCXX"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OBJCXX" >&5 +$as_echo "$ac_ct_OBJCXX" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$ac_ct_OBJCXX" && break +done + + if test "x$ac_ct_OBJCXX" = x; then + OBJCXX="g++" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + OBJCXX=$ac_ct_OBJCXX + fi +fi + +# Provide some information about the compiler. +$as_echo "$as_me:${as_lineno-$LINENO}: checking for Objective C++ compiler version" >&5 +set X $ac_compile +ac_compiler=$2 +for ac_option in --version -v -V -qversion; do + { { ac_try="$ac_compiler $ac_option >&5" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_compiler $ac_option >&5") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + sed '10a\ +... rest of stderr output deleted ... + 10q' conftest.err >conftest.er1 + cat conftest.er1 >&5 + fi + rm -f conftest.er1 conftest.err + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } +done + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU Objective C++ compiler" >&5 +$as_echo_n "checking whether we are using the GNU Objective C++ compiler... 
" >&6; } +if ${ac_cv_objcxx_compiler_gnu+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ +#ifndef __GNUC__ + choke me +#endif + + ; + return 0; +} +_ACEOF +if ac_fn_objcxx_try_compile "$LINENO"; then : + ac_compiler_gnu=yes +else + ac_compiler_gnu=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +ac_cv_objcxx_compiler_gnu=$ac_compiler_gnu + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objcxx_compiler_gnu" >&5 +$as_echo "$ac_cv_objcxx_compiler_gnu" >&6; } +if test $ac_compiler_gnu = yes; then + GOBJCXX=yes +else + GOBJCXX= +fi +ac_test_OBJCXXFLAGS=${OBJCXXFLAGS+set} +ac_save_OBJCXXFLAGS=$OBJCXXFLAGS +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $OBJCXX accepts -g" >&5 +$as_echo_n "checking whether $OBJCXX accepts -g... " >&6; } +if ${ac_cv_prog_objcxx_g+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_save_objcxx_werror_flag=$ac_objcxx_werror_flag + ac_objcxx_werror_flag=yes + ac_cv_prog_objcxx_g=no + OBJCXXFLAGS="-g" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_objcxx_try_compile "$LINENO"; then : + ac_cv_prog_objcxx_g=yes +else + OBJCXXFLAGS="" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_objcxx_try_compile "$LINENO"; then : + +else + ac_objcxx_werror_flag=$ac_save_objcxx_werror_flag + OBJCXXFLAGS="-g" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_objcxx_try_compile "$LINENO"; then : + ac_cv_prog_objcxx_g=yes +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + ac_objcxx_werror_flag=$ac_save_objcx_werror_flag +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_objcxx_g" >&5 +$as_echo "$ac_cv_prog_objcxx_g" >&6; } +if test "$ac_test_OBJCXXFLAGS" = set; then + OBJCXXFLAGS=$ac_save_OBJCXXFLAGS +elif test $ac_cv_prog_objcxx_g = yes; then + if test "$GOBJCXX" = yes; then + OBJCXXFLAGS="-g -O2" + else + OBJCXXFLAGS="-g" + fi +else + if test "$GOBJCXX" = yes; then + OBJCXXFLAGS="-O2" + else + OBJCXXFLAGS= + fi +fi +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + + + SCPROXY= + NOSCPROXY=# +else + SCPROXY=# + NOSCPROXY= +fi + + + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-cxx-main=" >&5 $as_echo_n "checking for --with-cxx-main=... " >&6; } diff --git a/configure.ac b/configure.ac index 497d7c191d537..d269c85d6e1e4 100644 --- a/configure.ac +++ b/configure.ac @@ -663,6 +663,25 @@ AC_PROG_SED AC_SUBST(CXX) AC_SUBST(MAINCC) + +if test "$ac_sys_system" = "Darwin" +then + # MacOSX requires an Objective C compiler to + # build some Mac-specific code. 
+ AC_PROG_OBJC + AC_PROG_OBJCXX + AC_SUBST(OBJC) + AC_SUBST(OBJCXX) + SCPROXY= + NOSCPROXY=# +else + SCPROXY=# + NOSCPROXY= +fi + +AC_SUBST(SCPROXY) +AC_SUBST(NOSCPROXY) + AC_MSG_CHECKING(for --with-cxx-main=) AC_ARG_WITH(cxx_main, AS_HELP_STRING([--with-cxx-main@<:@=COMPILER@:>@], diff --git a/setup.py b/setup.py index a220f366e2509..9b2cc7a27424a 100644 --- a/setup.py +++ b/setup.py @@ -1583,10 +1583,11 @@ def detect_platform_specific_exts(self): self.missing.append('ossaudiodev') if MACOS: - self.add(Extension('_scproxy', ['_scproxy.c'], + self.add(Extension('_scproxy', [], extra_link_args=[ '-framework', 'SystemConfiguration', - '-framework', 'CoreFoundation'])) + '-framework', 'CoreFoundation'], + extra_objects=['Modules/_scproxy.o'])) def detect_compress_exts(self): # Andrew Kuchling's zlib module. Note that some versions of zlib From webhook-mailer at python.org Mon May 18 10:07:03 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 18 May 2020 14:07:03 -0000 Subject: [Python-checkins] bpo-40548: Github Actions: update actions/checkout to v2 (GH-20164) Message-ID: https://github.com/python/cpython/commit/9ecf25e04cb0b97f7f9a12f50b87ac4fad5dc3f5 commit: 9ecf25e04cb0b97f7f9a12f50b87ac4fad5dc3f5 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-18T07:06:55-07:00 summary: bpo-40548: Github Actions: update actions/checkout to v2 (GH-20164) Signed-off-by: Filipe La?ns (cherry picked from commit c444108dd62672f2b41539bcc8f15da44501f405) Co-authored-by: Filipe La?ns files: M .github/workflows/build.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 27d7f15aa5931..cf86505980244 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -33,7 +33,7 @@ jobs: needs: check_source if: needs.check_source.outputs.run_tests == 'true' steps: - - uses: actions/checkout at v1 + - uses: actions/checkout at v2 - name: Build CPython 
run: .\PCbuild\build.bat -e -p Win32 - name: Display build info @@ -47,7 +47,7 @@ jobs: needs: check_source if: needs.check_source.outputs.run_tests == 'true' steps: - - uses: actions/checkout at v1 + - uses: actions/checkout at v2 - name: Build CPython run: .\PCbuild\build.bat -e -p x64 - name: Display build info @@ -61,7 +61,7 @@ jobs: needs: check_source if: needs.check_source.outputs.run_tests == 'true' steps: - - uses: actions/checkout at v1 + - uses: actions/checkout at v2 - name: Configure CPython run: ./configure --with-pydebug --with-openssl=/usr/local/opt/openssl --prefix=/opt/python-dev - name: Build CPython @@ -79,7 +79,7 @@ jobs: env: OPENSSL_VER: 1.1.1f steps: - - uses: actions/checkout at v1 + - uses: actions/checkout at v2 - name: Install Dependencies run: sudo ./.github/workflows/posix-deps-apt.sh - name: 'Restore OpenSSL build' From webhook-mailer at python.org Mon May 18 10:07:54 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 18 May 2020 14:07:54 -0000 Subject: [Python-checkins] bpo-40548: Github Actions: update actions/checkout to v2 (GH-20164) Message-ID: https://github.com/python/cpython/commit/d8cbfa2f2a9a972caf9cbc2b1e2565c456e08888 commit: d8cbfa2f2a9a972caf9cbc2b1e2565c456e08888 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-18T07:07:49-07:00 summary: bpo-40548: Github Actions: update actions/checkout to v2 (GH-20164) Signed-off-by: Filipe La?ns (cherry picked from commit c444108dd62672f2b41539bcc8f15da44501f405) Co-authored-by: Filipe La?ns files: M .github/workflows/build.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 27d7f15aa5931..cf86505980244 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -33,7 +33,7 @@ jobs: needs: check_source if: needs.check_source.outputs.run_tests == 'true' steps: - - uses: actions/checkout at v1 + - uses: actions/checkout at v2 - name: Build 
CPython run: .\PCbuild\build.bat -e -p Win32 - name: Display build info @@ -47,7 +47,7 @@ jobs: needs: check_source if: needs.check_source.outputs.run_tests == 'true' steps: - - uses: actions/checkout at v1 + - uses: actions/checkout at v2 - name: Build CPython run: .\PCbuild\build.bat -e -p x64 - name: Display build info @@ -61,7 +61,7 @@ jobs: needs: check_source if: needs.check_source.outputs.run_tests == 'true' steps: - - uses: actions/checkout at v1 + - uses: actions/checkout at v2 - name: Configure CPython run: ./configure --with-pydebug --with-openssl=/usr/local/opt/openssl --prefix=/opt/python-dev - name: Build CPython @@ -79,7 +79,7 @@ jobs: env: OPENSSL_VER: 1.1.1f steps: - - uses: actions/checkout at v1 + - uses: actions/checkout at v2 - name: Install Dependencies run: sudo ./.github/workflows/posix-deps-apt.sh - name: 'Restore OpenSSL build' From webhook-mailer at python.org Mon May 18 11:31:36 2020 From: webhook-mailer at python.org (Ned Deily) Date: Mon, 18 May 2020 15:31:36 -0000 Subject: [Python-checkins] Revert "bpo-26317: Support OBJC and OBJCXX configure command line variables (GH-20176)" (GH-20182) Message-ID: https://github.com/python/cpython/commit/951ab58024de9b5a21f0b979cdbea51e1049d781 commit: 951ab58024de9b5a21f0b979cdbea51e1049d781 branch: master author: Ned Deily committer: GitHub date: 2020-05-18T11:31:21-04:00 summary: Revert "bpo-26317: Support OBJC and OBJCXX configure command line variables (GH-20176)" (GH-20182) This reverts commit 0da546665075aefbb476e192ed64122d340164f4. The commit is causing make failures on a FreeBSD buildbot. Due to the imminent 3.9.0b1 cutoff, revert this commit for now pending further investigation. 
files: D Misc/NEWS.d/next/Build/2019-05-14-05-35-14.bpo-26317.no8mw-.rst M Doc/whatsnew/3.9.rst M Mac/Makefile.in M Mac/PythonLauncher/Makefile.in M Makefile.pre.in M Misc/ACKS M aclocal.m4 M configure M configure.ac M setup.py diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index 9e42e9db0ca35..66e7c6461ecd7 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -892,14 +892,6 @@ Build Changes functions are now required to build Python. (Contributed by Victor Stinner in :issue:`39395`.) -* The ``OBJC`` and ``OBJCXX`` standard command line options for the configure - script are now supported. This allows building fot macOS with a third-party - compiler, like GNU `gcc`, for everything except for the several - system-specific source files that need system headers that require the - Apple-supplied compile chain (e.g. the ``_scproxy`` helper module and - ``PythonLauncher.app``). - (Contributed by Jeffrey Kintscher in :issue:`26317`.) - C API Changes ============= diff --git a/Mac/Makefile.in b/Mac/Makefile.in index db59fc5d7f532..0b32673323a0d 100644 --- a/Mac/Makefile.in +++ b/Mac/Makefile.in @@ -21,7 +21,6 @@ PYTHONFRAMEWORK=@PYTHONFRAMEWORK@ PYTHONFRAMEWORKIDENTIFIER=@PYTHONFRAMEWORKIDENTIFIER@ LIPO_32BIT_FLAGS=@LIPO_32BIT_FLAGS@ CC=@CC@ -OBJC=@OBJC@ MACOSX_DEPLOYMENT_TARGET=@CONFIGURE_MACOSX_DEPLOYMENT_TARGET@ export MACOSX_DEPLOYMENT_TARGET diff --git a/Mac/PythonLauncher/Makefile.in b/Mac/PythonLauncher/Makefile.in index 1553b75905157..4c05f26e8358b 100644 --- a/Mac/PythonLauncher/Makefile.in +++ b/Mac/PythonLauncher/Makefile.in @@ -1,7 +1,5 @@ CC=@CC@ LD=@CC@ -OBJC=@OBJC@ -OBJCFLAFS=@OBJCFLAGS@ BASECFLAGS=@BASECFLAGS@ OPT=@OPT@ CFLAGS=@CFLAGS@ $(BASECFLAGS) $(OPT) @@ -54,25 +52,25 @@ Python\ Launcher.app: Info.plist \ cp -R $(srcdir)/English.lproj "Python Launcher.app/Contents/Resources" FileSettings.o: $(srcdir)/FileSettings.m - $(OBJC) $(CFLAGS) -o $@ -c $(srcdir)/FileSettings.m + $(CC) $(CFLAGS) -o $@ -c $(srcdir)/FileSettings.m 
MyAppDelegate.o: $(srcdir)/MyAppDelegate.m - $(OBJC) $(CFLAGS) -o $@ -c $(srcdir)/MyAppDelegate.m + $(CC) $(CFLAGS) -o $@ -c $(srcdir)/MyAppDelegate.m MyDocument.o: $(srcdir)/MyDocument.m - $(OBJC) $(CFLAGS) -o $@ -c $(srcdir)/MyDocument.m + $(CC) $(CFLAGS) -o $@ -c $(srcdir)/MyDocument.m PreferencesWindowController.o: $(srcdir)/PreferencesWindowController.m - $(OBJC) $(CFLAGS) -o $@ -c $(srcdir)/PreferencesWindowController.m + $(CC) $(CFLAGS) -o $@ -c $(srcdir)/PreferencesWindowController.m doscript.o: $(srcdir)/doscript.m - $(OBJC) $(CFLAGS) -o $@ -c $(srcdir)/doscript.m + $(CC) $(CFLAGS) -o $@ -c $(srcdir)/doscript.m main.o: $(srcdir)/main.m - $(OBJC) $(CFLAGS) -o $@ -c $(srcdir)/main.m + $(CC) $(CFLAGS) -o $@ -c $(srcdir)/main.m Python\ Launcher: $(OBJECTS) - $(OBJC) $(LDFLAGS) -o "Python Launcher" $(OBJECTS) -framework AppKit -framework Carbon + $(CC) $(LDFLAGS) -o "Python Launcher" $(OBJECTS) -framework AppKit -framework Carbon Info.plist: $(srcdir)/Info.plist.in sed 's/%VERSION%/'"`$(RUNSHARED) $(BUILDPYTHON) -c 'import platform; print(platform.python_version())'`"'/g' < $(srcdir)/Info.plist.in > Info.plist diff --git a/Makefile.pre.in b/Makefile.pre.in index 339cbfd56cf8e..dbfd805f1a02f 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -35,7 +35,6 @@ abs_builddir= @abs_builddir@ CC= @CC@ CXX= @CXX@ -OBJC= @OBJC@ MAINCC= @MAINCC@ LINKCC= @LINKCC@ AR= @AR@ @@ -614,22 +613,12 @@ $(srcdir)/Modules/_blake2/blake2s_impl.c: $(srcdir)/Modules/_blake2/blake2b_impl $(PYTHON_FOR_REGEN) $(srcdir)/Modules/_blake2/blake2b2s.py $(PYTHON_FOR_REGEN) $(srcdir)/Tools/clinic/clinic.py -f $@ -# _scproxy.o needs to be built outside of setup.py so that we can force -# the use of the OBJC compiler when the CC compiler is different. For -# example, it allows _scproxy.c to be compiled using the clang compiler -# while the rest of the project uses the GNU C compiler. 
-# -# see issue #26317 for details - at SCPROXY@Modules/_scproxy.o: $(srcdir)/Modules/_scproxy.c $(srcdir)/Include/Python.h - at SCPROXY@ $(OBJC) -c $(CCSHARED) $(PY_CORE_CFLAGS) -o $@ $< - at NOSCPROXY@.PHONY: Modules/_scproxy.o - # Build the shared modules # Under GNU make, MAKEFLAGS are sorted and normalized; the 's' for # -s, --silent or --quiet is always the first char. # Under BSD make, MAKEFLAGS might be " -s -v x=y". # Ignore macros passed by GNU make, passed after -- -sharedmods: $(BUILDPYTHON) pybuilddir.txt Modules/_math.o Modules/_scproxy.o +sharedmods: $(BUILDPYTHON) pybuilddir.txt Modules/_math.o @case "`echo X $$MAKEFLAGS | sed 's/^X //;s/ -- .*//'`" in \ *\ -s*|s*) quiet="-q";; \ *) quiet="";; \ diff --git a/Misc/ACKS b/Misc/ACKS index 86b687b825ff2..6511383fa25d7 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -882,7 +882,6 @@ Sam Kimbrel Tomohiko Kinebuchi James King W. Trevor King -Jeffrey Kintscher Paul Kippes Steve Kirsch Sebastian Kirsche diff --git a/Misc/NEWS.d/next/Build/2019-05-14-05-35-14.bpo-26317.no8mw-.rst b/Misc/NEWS.d/next/Build/2019-05-14-05-35-14.bpo-26317.no8mw-.rst deleted file mode 100644 index 4072a4a7cd2b5..0000000000000 --- a/Misc/NEWS.d/next/Build/2019-05-14-05-35-14.bpo-26317.no8mw-.rst +++ /dev/null @@ -1 +0,0 @@ -The OBJC and OBJCXX standard command line options for the configure script are now supported. This allows building fot macOS with a third-party compiler, like GNU gcc, for everything except for the several system-specific source files that need system headers that require the Apple-supplied compile chain (e.g. the _scproxy helper module and PythonLauncher.app). Patch by Jeffrey Kintscher. diff --git a/aclocal.m4 b/aclocal.m4 index 3b865c34602a5..b5f9cb0e8da44 100644 --- a/aclocal.m4 +++ b/aclocal.m4 @@ -55,7 +55,7 @@ dnl dnl See the "Since" comment for each macro you use to see what version dnl of the macros you require. 
m4_defun([PKG_PREREQ], -[m4_define([PKG_MACROS_VERSION], [0.29.2]) +[m4_define([PKG_MACROS_VERSION], [0.29.1]) m4_if(m4_version_compare(PKG_MACROS_VERSION, [$1]), -1, [m4_fatal([pkg.m4 version $1 or higher is required but ]PKG_MACROS_VERSION[ found])]) ])dnl PKG_PREREQ @@ -156,7 +156,7 @@ AC_ARG_VAR([$1][_CFLAGS], [C compiler flags for $1, overriding pkg-config])dnl AC_ARG_VAR([$1][_LIBS], [linker flags for $1, overriding pkg-config])dnl pkg_failed=no -AC_MSG_CHECKING([for $2]) +AC_MSG_CHECKING([for $1]) _PKG_CONFIG([$1][_CFLAGS], [cflags], [$2]) _PKG_CONFIG([$1][_LIBS], [libs], [$2]) @@ -166,11 +166,11 @@ and $1[]_LIBS to avoid the need to call pkg-config. See the pkg-config man page for more details.]) if test $pkg_failed = yes; then - AC_MSG_RESULT([no]) + AC_MSG_RESULT([no]) _PKG_SHORT_ERRORS_SUPPORTED if test $_pkg_short_errors_supported = yes; then $1[]_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$2" 2>&1` - else + else $1[]_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$2" 2>&1` fi # Put the nasty error message in config.log where it belongs @@ -187,7 +187,7 @@ installed software in a non-standard prefix. _PKG_TEXT])[]dnl ]) elif test $pkg_failed = untried; then - AC_MSG_RESULT([no]) + AC_MSG_RESULT([no]) m4_default([$4], [AC_MSG_FAILURE( [The pkg-config script could not be found or is too old. Make sure it is in your PATH or set the PKG_CONFIG environment variable to the full diff --git a/configure b/configure index 4242d5c60ff56..56d66d0235289 100755 --- a/configure +++ b/configure @@ -716,14 +716,6 @@ MULTIARCH_CPPFLAGS PLATFORM_TRIPLET MULTIARCH ac_ct_CXX -NOSCPROXY -SCPROXY -ac_ct_OBJCXX -OBJCXXFLAGS -OBJCXX -ac_ct_OBJC -OBJCFLAGS -OBJC MAINCC CXX SED @@ -868,10 +860,6 @@ LDFLAGS LIBS CPPFLAGS CPP -OBJC -OBJCFLAGS -OBJCXX -OBJCXXFLAGS PROFILE_TASK PKG_CONFIG PKG_CONFIG_PATH @@ -1611,10 +1599,6 @@ Some influential environment variables: CPPFLAGS (Objective) C/C++ preprocessor flags, e.g. 
-I if you have headers in a nonstandard directory CPP C preprocessor - OBJC Objective C compiler command - OBJCFLAGS Objective C compiler flags - OBJCXX Objective C++ compiler command - OBJCXXFLAGS Objective C++ compiler flags PROFILE_TASK Python args for PGO generation task PKG_CONFIG path to pkg-config utility @@ -1778,82 +1762,6 @@ fi } # ac_fn_c_try_cpp -# ac_fn_objc_try_compile LINENO -# ----------------------------- -# Try to compile conftest.$ac_ext, and return whether this succeeded. -ac_fn_objc_try_compile () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - rm -f conftest.$ac_objext - if { { ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_compile") 2>conftest.err - ac_status=$? - if test -s conftest.err; then - grep -v '^ *+' conftest.err >conftest.er1 - cat conftest.er1 >&5 - mv -f conftest.er1 conftest.err - fi - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } && { - test -z "$ac_objc_werror_flag" || - test ! -s conftest.err - } && test -s conftest.$ac_objext; then : - ac_retval=0 -else - $as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_retval=1 -fi - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - as_fn_set_status $ac_retval - -} # ac_fn_objc_try_compile - -# ac_fn_objcxx_try_compile LINENO -# ------------------------------- -# Try to compile conftest.$ac_ext, and return whether this succeeded. 
-ac_fn_objcxx_try_compile () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - rm -f conftest.$ac_objext - if { { ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_compile") 2>conftest.err - ac_status=$? - if test -s conftest.err; then - grep -v '^ *+' conftest.err >conftest.er1 - cat conftest.er1 >&5 - mv -f conftest.er1 conftest.err - fi - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } && { - test -z "$ac_objcxx_werror_flag" || - test ! -s conftest.err - } && test -s conftest.$ac_objext; then : - ac_retval=0 -else - $as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_retval=1 -fi - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - as_fn_set_status $ac_retval - -} # ac_fn_objcxx_try_compile - # ac_fn_c_try_link LINENO # ----------------------- # Try to link conftest.$ac_ext, and return whether this succeeded. @@ -4733,525 +4641,6 @@ $as_echo "$ac_cv_path_SED" >&6; } - -if test "$ac_sys_system" = "Darwin" -then - # MacOSX requires an Objective C compiler to - # build some Mac-specific code. - ac_ext=m -ac_cpp='$OBJCPP $CPPFLAGS' -ac_compile='$OBJC -c $OBJCFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$OBJC -o conftest$ac_exeext $OBJCFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_objc_compiler_gnu -if test -n "$ac_tool_prefix"; then - for ac_prog in gcc objcc objc cc CC - do - # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. -set dummy $ac_tool_prefix$ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... 
" >&6; } -if ${ac_cv_prog_OBJC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$OBJC"; then - ac_cv_prog_OBJC="$OBJC" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - ac_cv_prog_OBJC="$ac_tool_prefix$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -OBJC=$ac_cv_prog_OBJC -if test -n "$OBJC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OBJC" >&5 -$as_echo "$OBJC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$OBJC" && break - done -fi -if test -z "$OBJC"; then - ac_ct_OBJC=$OBJC - for ac_prog in gcc objcc objc cc CC -do - # Extract the first word of "$ac_prog", so it can be a program name with args. -set dummy $ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_OBJC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_OBJC"; then - ac_cv_prog_ac_ct_OBJC="$ac_ct_OBJC" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - ac_cv_prog_ac_ct_OBJC="$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_OBJC=$ac_cv_prog_ac_ct_OBJC -if test -n "$ac_ct_OBJC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OBJC" >&5 -$as_echo "$ac_ct_OBJC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$ac_ct_OBJC" && break -done - - if test "x$ac_ct_OBJC" = x; then - OBJC="gcc" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - OBJC=$ac_ct_OBJC - fi -fi - -# Provide some information about the compiler. -$as_echo "$as_me:${as_lineno-$LINENO}: checking for Objective C compiler version" >&5 -set X $ac_compile -ac_compiler=$2 -for ac_option in --version -v -V -qversion; do - { { ac_try="$ac_compiler $ac_option >&5" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_compiler $ac_option >&5") 2>conftest.err - ac_status=$? - if test -s conftest.err; then - sed '10a\ -... rest of stderr output deleted ... - 10q' conftest.err >conftest.er1 - cat conftest.er1 >&5 - fi - rm -f conftest.er1 conftest.err - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } -done - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU Objective C compiler" >&5 -$as_echo_n "checking whether we are using the GNU Objective C compiler... 
" >&6; } -if ${ac_cv_objc_compiler_gnu+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ -#ifndef __GNUC__ - choke me -#endif - - ; - return 0; -} -_ACEOF -if ac_fn_objc_try_compile "$LINENO"; then : - ac_compiler_gnu=yes -else - ac_compiler_gnu=no -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -ac_cv_objc_compiler_gnu=$ac_compiler_gnu - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objc_compiler_gnu" >&5 -$as_echo "$ac_cv_objc_compiler_gnu" >&6; } -if test $ac_compiler_gnu = yes; then - GOBJC=yes -else - GOBJC= -fi -ac_test_OBJCFLAGS=${OBJCFLAGS+set} -ac_save_OBJCFLAGS=$OBJCFLAGS -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $OBJC accepts -g" >&5 -$as_echo_n "checking whether $OBJC accepts -g... " >&6; } -if ${ac_cv_prog_objc_g+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_save_objc_werror_flag=$ac_objc_werror_flag - ac_objc_werror_flag=yes - ac_cv_prog_objc_g=no - OBJCFLAGS="-g" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_objc_try_compile "$LINENO"; then : - ac_cv_prog_objc_g=yes -else - OBJCFLAGS="" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_objc_try_compile "$LINENO"; then : - -else - ac_objc_werror_flag=$ac_save_objc_werror_flag - OBJCFLAGS="-g" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. 
*/ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_objc_try_compile "$LINENO"; then : - ac_cv_prog_objc_g=yes -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext - ac_objc_werror_flag=$ac_save_objc_werror_flag -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_objc_g" >&5 -$as_echo "$ac_cv_prog_objc_g" >&6; } -if test "$ac_test_OBJCFLAGS" = set; then - OBJCFLAGS=$ac_save_OBJCFLAGS -elif test $ac_cv_prog_objc_g = yes; then - if test "$GOBJC" = yes; then - OBJCFLAGS="-g -O2" - else - OBJCFLAGS="-g" - fi -else - if test "$GOBJC" = yes; then - OBJCFLAGS="-O2" - else - OBJCFLAGS= - fi -fi -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu - - ac_ext=mm -ac_cpp='$OBJCXXCPP $CPPFLAGS' -ac_compile='$OBJCXX -c $OBJCXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$OBJCXX -o conftest$ac_exeext $OBJCXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_objcxx_compiler_gnu -if test -n "$ac_tool_prefix"; then - for ac_prog in g++ objc++ objcxx c++ CXX - do - # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. -set dummy $ac_tool_prefix$ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_OBJCXX+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$OBJCXX"; then - ac_cv_prog_OBJCXX="$OBJCXX" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - ac_cv_prog_OBJCXX="$ac_tool_prefix$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -OBJCXX=$ac_cv_prog_OBJCXX -if test -n "$OBJCXX"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OBJCXX" >&5 -$as_echo "$OBJCXX" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$OBJCXX" && break - done -fi -if test -z "$OBJCXX"; then - ac_ct_OBJCXX=$OBJCXX - for ac_prog in g++ objc++ objcxx c++ CXX -do - # Extract the first word of "$ac_prog", so it can be a program name with args. -set dummy $ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_OBJCXX+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_OBJCXX"; then - ac_cv_prog_ac_ct_OBJCXX="$ac_ct_OBJCXX" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - ac_cv_prog_ac_ct_OBJCXX="$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_OBJCXX=$ac_cv_prog_ac_ct_OBJCXX -if test -n "$ac_ct_OBJCXX"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OBJCXX" >&5 -$as_echo "$ac_ct_OBJCXX" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$ac_ct_OBJCXX" && break -done - - if test "x$ac_ct_OBJCXX" = x; then - OBJCXX="g++" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - OBJCXX=$ac_ct_OBJCXX - fi -fi - -# Provide some information about the compiler. -$as_echo "$as_me:${as_lineno-$LINENO}: checking for Objective C++ compiler version" >&5 -set X $ac_compile -ac_compiler=$2 -for ac_option in --version -v -V -qversion; do - { { ac_try="$ac_compiler $ac_option >&5" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_compiler $ac_option >&5") 2>conftest.err - ac_status=$? - if test -s conftest.err; then - sed '10a\ -... rest of stderr output deleted ... - 10q' conftest.err >conftest.er1 - cat conftest.er1 >&5 - fi - rm -f conftest.er1 conftest.err - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } -done - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU Objective C++ compiler" >&5 -$as_echo_n "checking whether we are using the GNU Objective C++ compiler... 
" >&6; } -if ${ac_cv_objcxx_compiler_gnu+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ -#ifndef __GNUC__ - choke me -#endif - - ; - return 0; -} -_ACEOF -if ac_fn_objcxx_try_compile "$LINENO"; then : - ac_compiler_gnu=yes -else - ac_compiler_gnu=no -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -ac_cv_objcxx_compiler_gnu=$ac_compiler_gnu - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objcxx_compiler_gnu" >&5 -$as_echo "$ac_cv_objcxx_compiler_gnu" >&6; } -if test $ac_compiler_gnu = yes; then - GOBJCXX=yes -else - GOBJCXX= -fi -ac_test_OBJCXXFLAGS=${OBJCXXFLAGS+set} -ac_save_OBJCXXFLAGS=$OBJCXXFLAGS -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $OBJCXX accepts -g" >&5 -$as_echo_n "checking whether $OBJCXX accepts -g... " >&6; } -if ${ac_cv_prog_objcxx_g+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_save_objcxx_werror_flag=$ac_objcxx_werror_flag - ac_objcxx_werror_flag=yes - ac_cv_prog_objcxx_g=no - OBJCXXFLAGS="-g" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_objcxx_try_compile "$LINENO"; then : - ac_cv_prog_objcxx_g=yes -else - OBJCXXFLAGS="" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_objcxx_try_compile "$LINENO"; then : - -else - ac_objcxx_werror_flag=$ac_save_objcxx_werror_flag - OBJCXXFLAGS="-g" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. 
*/ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_objcxx_try_compile "$LINENO"; then : - ac_cv_prog_objcxx_g=yes -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext - ac_objcxx_werror_flag=$ac_save_objcx_werror_flag -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_objcxx_g" >&5 -$as_echo "$ac_cv_prog_objcxx_g" >&6; } -if test "$ac_test_OBJCXXFLAGS" = set; then - OBJCXXFLAGS=$ac_save_OBJCXXFLAGS -elif test $ac_cv_prog_objcxx_g = yes; then - if test "$GOBJCXX" = yes; then - OBJCXXFLAGS="-g -O2" - else - OBJCXXFLAGS="-g" - fi -else - if test "$GOBJCXX" = yes; then - OBJCXXFLAGS="-O2" - else - OBJCXXFLAGS= - fi -fi -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu - - - - SCPROXY= - NOSCPROXY=# -else - SCPROXY=# - NOSCPROXY= -fi - - - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-cxx-main=" >&5 $as_echo_n "checking for --with-cxx-main=... " >&6; } diff --git a/configure.ac b/configure.ac index d269c85d6e1e4..497d7c191d537 100644 --- a/configure.ac +++ b/configure.ac @@ -663,25 +663,6 @@ AC_PROG_SED AC_SUBST(CXX) AC_SUBST(MAINCC) - -if test "$ac_sys_system" = "Darwin" -then - # MacOSX requires an Objective C compiler to - # build some Mac-specific code. 
- AC_PROG_OBJC - AC_PROG_OBJCXX - AC_SUBST(OBJC) - AC_SUBST(OBJCXX) - SCPROXY= - NOSCPROXY=# -else - SCPROXY=# - NOSCPROXY= -fi - -AC_SUBST(SCPROXY) -AC_SUBST(NOSCPROXY) - AC_MSG_CHECKING(for --with-cxx-main=) AC_ARG_WITH(cxx_main, AS_HELP_STRING([--with-cxx-main@<:@=COMPILER@:>@], diff --git a/setup.py b/setup.py index 9b2cc7a27424a..a220f366e2509 100644 --- a/setup.py +++ b/setup.py @@ -1583,11 +1583,10 @@ def detect_platform_specific_exts(self): self.missing.append('ossaudiodev') if MACOS: - self.add(Extension('_scproxy', [], + self.add(Extension('_scproxy', ['_scproxy.c'], extra_link_args=[ '-framework', 'SystemConfiguration', - '-framework', 'CoreFoundation'], - extra_objects=['Modules/_scproxy.o'])) + '-framework', 'CoreFoundation'])) def detect_compress_exts(self): # Andrew Kuchling's zlib module. Note that some versions of zlib From webhook-mailer at python.org Mon May 18 12:17:24 2020 From: webhook-mailer at python.org (Minmin Gong) Date: Mon, 18 May 2020 16:17:24 -0000 Subject: [Python-checkins] bpo-40653: Move _dirnameW out of #ifdef HAVE_SYMLINK/#endif (GH-20144) Message-ID: https://github.com/python/cpython/commit/7f21c9ac872acc2114aee3313d132b016550ff42 commit: 7f21c9ac872acc2114aee3313d132b016550ff42 branch: master author: Minmin Gong committer: GitHub date: 2020-05-18T17:17:19+01:00 summary: bpo-40653: Move _dirnameW out of #ifdef HAVE_SYMLINK/#endif (GH-20144) files: A Misc/NEWS.d/next/Build/2020-05-17-03-33-00.bpo-40653.WI8UGn.rst M Modules/posixmodule.c diff --git a/Misc/NEWS.d/next/Build/2020-05-17-03-33-00.bpo-40653.WI8UGn.rst b/Misc/NEWS.d/next/Build/2020-05-17-03-33-00.bpo-40653.WI8UGn.rst new file mode 100644 index 0000000000000..1e6c5cb32b722 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2020-05-17-03-33-00.bpo-40653.WI8UGn.rst @@ -0,0 +1 @@ +Move _dirnameW out of HAVE_SYMLINK to fix a potential compiling issue. 
\ No newline at end of file diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index 2ddf30de89a68..ddff28354a7c1 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -8156,8 +8156,6 @@ os_readlink_impl(PyObject *module, path_t *path, int dir_fd) } #endif /* defined(HAVE_READLINK) || defined(MS_WINDOWS) */ -#ifdef HAVE_SYMLINK - #if defined(MS_WINDOWS) /* Remove the last portion of the path - return 0 on success */ @@ -8180,6 +8178,12 @@ _dirnameW(WCHAR *path) return 0; } +#endif + +#ifdef HAVE_SYMLINK + +#if defined(MS_WINDOWS) + /* Is this path absolute? */ static int _is_absW(const WCHAR *path) From webhook-mailer at python.org Mon May 18 12:23:01 2020 From: webhook-mailer at python.org (Minmin Gong) Date: Mon, 18 May 2020 16:23:01 -0000 Subject: [Python-checkins] bpo-40650: Include winsock2.h in pytime.c, instead of a full windows.h (GH-20137) Message-ID: https://github.com/python/cpython/commit/f660567937277cc3a2cd53af77bbb18e905427e8 commit: f660567937277cc3a2cd53af77bbb18e905427e8 branch: master author: Minmin Gong committer: GitHub date: 2020-05-18T17:22:53+01:00 summary: bpo-40650: Include winsock2.h in pytime.c, instead of a full windows.h (GH-20137) files: A Misc/NEWS.d/next/Windows/2020-05-17-00-08-13.bpo-40650.4euMtU.rst M Python/pytime.c diff --git a/Misc/NEWS.d/next/Windows/2020-05-17-00-08-13.bpo-40650.4euMtU.rst b/Misc/NEWS.d/next/Windows/2020-05-17-00-08-13.bpo-40650.4euMtU.rst new file mode 100644 index 0000000000000..db13e58b14a79 --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2020-05-17-00-08-13.bpo-40650.4euMtU.rst @@ -0,0 +1 @@ +Include winsock2.h in pytime.c for timeval. 
\ No newline at end of file diff --git a/Python/pytime.c b/Python/pytime.c index 6affccbeffa88..b121b432f428d 100644 --- a/Python/pytime.c +++ b/Python/pytime.c @@ -1,6 +1,6 @@ #include "Python.h" #ifdef MS_WINDOWS -#include +#include /* struct timeval */ #endif #if defined(__APPLE__) From webhook-mailer at python.org Mon May 18 12:34:29 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 18 May 2020 16:34:29 -0000 Subject: [Python-checkins] bpo-40653: Move _dirnameW out of GH-ifdef HAVE_SYMLINK/GH-endif (GH-20144) Message-ID: https://github.com/python/cpython/commit/6da26f8cec5c3f012e2fd001042ccadcd8aba640 commit: 6da26f8cec5c3f012e2fd001042ccadcd8aba640 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-18T09:34:21-07:00 summary: bpo-40653: Move _dirnameW out of GH-ifdef HAVE_SYMLINK/GH-endif (GH-20144) (cherry picked from commit 7f21c9ac872acc2114aee3313d132b016550ff42) Co-authored-by: Minmin Gong files: A Misc/NEWS.d/next/Build/2020-05-17-03-33-00.bpo-40653.WI8UGn.rst M Modules/posixmodule.c diff --git a/Misc/NEWS.d/next/Build/2020-05-17-03-33-00.bpo-40653.WI8UGn.rst b/Misc/NEWS.d/next/Build/2020-05-17-03-33-00.bpo-40653.WI8UGn.rst new file mode 100644 index 0000000000000..1e6c5cb32b722 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2020-05-17-03-33-00.bpo-40653.WI8UGn.rst @@ -0,0 +1 @@ +Move _dirnameW out of HAVE_SYMLINK to fix a potential compiling issue. \ No newline at end of file diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index 977e49f432a34..43d4302b92de7 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -7301,8 +7301,6 @@ win_readlink(PyObject *self, PyObject *args, PyObject *kwargs) -#ifdef HAVE_SYMLINK - #if defined(MS_WINDOWS) /* Grab CreateSymbolicLinkW dynamically from kernel32 */ @@ -7341,6 +7339,12 @@ _dirnameW(WCHAR *path) return 0; } +#endif + +#ifdef HAVE_SYMLINK + +#if defined(MS_WINDOWS) + /* Is this path absolute? 
*/ static int _is_absW(const WCHAR *path) From webhook-mailer at python.org Mon May 18 12:36:10 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 18 May 2020 16:36:10 -0000 Subject: [Python-checkins] bpo-40653: Move _dirnameW out of GH-ifdef HAVE_SYMLINK/GH-endif (GH-20144) Message-ID: https://github.com/python/cpython/commit/ddd5bbdeec8cf18ef074b3506c96d22ac3dc1f93 commit: ddd5bbdeec8cf18ef074b3506c96d22ac3dc1f93 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-18T09:36:02-07:00 summary: bpo-40653: Move _dirnameW out of GH-ifdef HAVE_SYMLINK/GH-endif (GH-20144) (cherry picked from commit 7f21c9ac872acc2114aee3313d132b016550ff42) Co-authored-by: Minmin Gong files: A Misc/NEWS.d/next/Build/2020-05-17-03-33-00.bpo-40653.WI8UGn.rst M Modules/posixmodule.c diff --git a/Misc/NEWS.d/next/Build/2020-05-17-03-33-00.bpo-40653.WI8UGn.rst b/Misc/NEWS.d/next/Build/2020-05-17-03-33-00.bpo-40653.WI8UGn.rst new file mode 100644 index 0000000000000..1e6c5cb32b722 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2020-05-17-03-33-00.bpo-40653.WI8UGn.rst @@ -0,0 +1 @@ +Move _dirnameW out of HAVE_SYMLINK to fix a potential compiling issue. \ No newline at end of file diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index eb0b56aebbaa3..726e3723f99d3 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -8019,8 +8019,6 @@ os_readlink_impl(PyObject *module, path_t *path, int dir_fd) } #endif /* defined(HAVE_READLINK) || defined(MS_WINDOWS) */ -#ifdef HAVE_SYMLINK - #if defined(MS_WINDOWS) /* Remove the last portion of the path - return 0 on success */ @@ -8043,6 +8041,12 @@ _dirnameW(WCHAR *path) return 0; } +#endif + +#ifdef HAVE_SYMLINK + +#if defined(MS_WINDOWS) + /* Is this path absolute? 
*/ static int _is_absW(const WCHAR *path) From webhook-mailer at python.org Mon May 18 12:39:20 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 18 May 2020 16:39:20 -0000 Subject: [Python-checkins] bpo-40650: Include winsock2.h in pytime.c, instead of a full windows.h (GH-20137) Message-ID: https://github.com/python/cpython/commit/8e49c1fcf15969ca5c585648a5520617cc0566e6 commit: 8e49c1fcf15969ca5c585648a5520617cc0566e6 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-18T09:39:13-07:00 summary: bpo-40650: Include winsock2.h in pytime.c, instead of a full windows.h (GH-20137) (cherry picked from commit f660567937277cc3a2cd53af77bbb18e905427e8) Co-authored-by: Minmin Gong files: A Misc/NEWS.d/next/Windows/2020-05-17-00-08-13.bpo-40650.4euMtU.rst M Python/pytime.c diff --git a/Misc/NEWS.d/next/Windows/2020-05-17-00-08-13.bpo-40650.4euMtU.rst b/Misc/NEWS.d/next/Windows/2020-05-17-00-08-13.bpo-40650.4euMtU.rst new file mode 100644 index 0000000000000..db13e58b14a79 --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2020-05-17-00-08-13.bpo-40650.4euMtU.rst @@ -0,0 +1 @@ +Include winsock2.h in pytime.c for timeval. 
\ No newline at end of file diff --git a/Python/pytime.c b/Python/pytime.c index 0e9413174195d..3432551bcd8cb 100644 --- a/Python/pytime.c +++ b/Python/pytime.c @@ -1,6 +1,6 @@ #include "Python.h" #ifdef MS_WINDOWS -#include <windows.h> +#include <winsock2.h> /* struct timeval */ #endif #if defined(__APPLE__) From webhook-mailer at python.org Mon May 18 12:42:22 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 18 May 2020 16:42:22 -0000 Subject: [Python-checkins] bpo-40650: Include winsock2.h in pytime.c, instead of a full windows.h (GH-20137) Message-ID: https://github.com/python/cpython/commit/ab9d9535aad5e627cb9ae471f186e27a65e48c6e commit: ab9d9535aad5e627cb9ae471f186e27a65e48c6e branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-18T09:42:14-07:00 summary: bpo-40650: Include winsock2.h in pytime.c, instead of a full windows.h (GH-20137) (cherry picked from commit f660567937277cc3a2cd53af77bbb18e905427e8) Co-authored-by: Minmin Gong files: A Misc/NEWS.d/next/Windows/2020-05-17-00-08-13.bpo-40650.4euMtU.rst M Python/pytime.c diff --git a/Misc/NEWS.d/next/Windows/2020-05-17-00-08-13.bpo-40650.4euMtU.rst b/Misc/NEWS.d/next/Windows/2020-05-17-00-08-13.bpo-40650.4euMtU.rst new file mode 100644 index 0000000000000..db13e58b14a79 --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2020-05-17-00-08-13.bpo-40650.4euMtU.rst @@ -0,0 +1 @@ +Include winsock2.h in pytime.c for timeval. 
\ No newline at end of file diff --git a/Python/pytime.c b/Python/pytime.c index 9ff300699f04a..109d52692ce48 100644 --- a/Python/pytime.c +++ b/Python/pytime.c @@ -1,6 +1,6 @@ #include "Python.h" #ifdef MS_WINDOWS -#include <windows.h> +#include <winsock2.h> /* struct timeval */ #endif #if defined(__APPLE__) From webhook-mailer at python.org Mon May 18 12:50:12 2020 From: webhook-mailer at python.org (Minmin Gong) Date: Mon, 18 May 2020 16:50:12 -0000 Subject: [Python-checkins] bpo-35890: Use RegQueryInfoKeyW and CryptAcquireContextW explicitly (GH-19974) Message-ID: https://github.com/python/cpython/commit/98e42d1f882b9b59f587d538c562dbc7d11c64c3 commit: 98e42d1f882b9b59f587d538c562dbc7d11c64c3 branch: master author: Minmin Gong committer: GitHub date: 2020-05-18T17:50:03+01:00 summary: bpo-35890: Use RegQueryInfoKeyW and CryptAcquireContextW explicitly (GH-19974) files: M PC/getpathp.c M PC/winreg.c M Python/bootstrap_hash.c diff --git a/PC/getpathp.c b/PC/getpathp.c index d23d2bbde809f..fd5cfa7e1a8a3 100644 --- a/PC/getpathp.c +++ b/PC/getpathp.c @@ -358,7 +358,7 @@ getpythonregpath(HKEY keyBase, int skipcore) goto done; } /* Find out how big our core buffer is, and how many subkeys we have */ - rc = RegQueryInfoKey(newKey, NULL, NULL, NULL, &numKeys, NULL, NULL, + rc = RegQueryInfoKeyW(newKey, NULL, NULL, NULL, &numKeys, NULL, NULL, NULL, NULL, &dataSize, NULL, NULL); if (rc!=ERROR_SUCCESS) { goto done; diff --git a/PC/winreg.c b/PC/winreg.c index 3e13e75826f15..1305b7030fada 100644 --- a/PC/winreg.c +++ b/PC/winreg.c @@ -1451,9 +1451,9 @@ winreg_QueryInfoKey_impl(PyObject *module, HKEY key) if ((rc = PySys_Audit("winreg.QueryInfoKey", "n", (Py_ssize_t)key) < 0) { return NULL; } - if ((rc = RegQueryInfoKey(key, NULL, NULL, 0, &nSubKeys, NULL, NULL, - &nValues, NULL, NULL, NULL, &ft)) - != ERROR_SUCCESS) { + if ((rc = RegQueryInfoKeyW(key, NULL, NULL, 0, &nSubKeys, NULL, NULL, + &nValues, NULL, NULL, NULL, &ft)) - != ERROR_SUCCESS) { return PyErr_SetFromWindowsErrWithFunction(rc, 
"RegQueryInfoKey"); } li.LowPart = ft.dwLowDateTime; diff --git a/Python/bootstrap_hash.c b/Python/bootstrap_hash.c index aa3a3dfd964ed..b2109275014b2 100644 --- a/Python/bootstrap_hash.c +++ b/Python/bootstrap_hash.c @@ -38,8 +38,8 @@ static int win32_urandom_init(int raise) { /* Acquire context */ - if (!CryptAcquireContext(&hCryptProv, NULL, NULL, - PROV_RSA_FULL, CRYPT_VERIFYCONTEXT)) + if (!CryptAcquireContextW(&hCryptProv, NULL, NULL, + PROV_RSA_FULL, CRYPT_VERIFYCONTEXT)) goto error; return 0; From webhook-mailer at python.org Mon May 18 13:10:03 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 18 May 2020 17:10:03 -0000 Subject: [Python-checkins] bpo-35890: Use RegQueryInfoKeyW and CryptAcquireContextW explicitly (GH-19974) Message-ID: https://github.com/python/cpython/commit/460eac20a625d5dcef409dadc120a26d272a8013 commit: 460eac20a625d5dcef409dadc120a26d272a8013 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-18T10:09:59-07:00 summary: bpo-35890: Use RegQueryInfoKeyW and CryptAcquireContextW explicitly (GH-19974) (cherry picked from commit 98e42d1f882b9b59f587d538c562dbc7d11c64c3) Co-authored-by: Minmin Gong files: M PC/getpathp.c M PC/winreg.c M Python/bootstrap_hash.c diff --git a/PC/getpathp.c b/PC/getpathp.c index 3747ffb2d89f0..53653c81fb45f 100644 --- a/PC/getpathp.c +++ b/PC/getpathp.c @@ -405,7 +405,7 @@ getpythonregpath(HKEY keyBase, int skipcore) goto done; } /* Find out how big our core buffer is, and how many subkeys we have */ - rc = RegQueryInfoKey(newKey, NULL, NULL, NULL, &numKeys, NULL, NULL, + rc = RegQueryInfoKeyW(newKey, NULL, NULL, NULL, &numKeys, NULL, NULL, NULL, NULL, &dataSize, NULL, NULL); if (rc!=ERROR_SUCCESS) { goto done; diff --git a/PC/winreg.c b/PC/winreg.c index 5dff7deadf767..caad18e045bcb 100644 --- a/PC/winreg.c +++ b/PC/winreg.c @@ -1451,9 +1451,9 @@ winreg_QueryInfoKey_impl(PyObject *module, HKEY key) if 
(PySys_Audit("winreg.QueryInfoKey", "n", (Py_ssize_t)key) < 0) { return NULL; } - if ((rc = RegQueryInfoKey(key, NULL, NULL, 0, &nSubKeys, NULL, NULL, - &nValues, NULL, NULL, NULL, &ft)) - != ERROR_SUCCESS) { + if ((rc = RegQueryInfoKeyW(key, NULL, NULL, 0, &nSubKeys, NULL, NULL, + &nValues, NULL, NULL, NULL, &ft)) + != ERROR_SUCCESS) { return PyErr_SetFromWindowsErrWithFunction(rc, "RegQueryInfoKey"); } li.LowPart = ft.dwLowDateTime; diff --git a/Python/bootstrap_hash.c b/Python/bootstrap_hash.c index 43f5264d86250..eb2b6d08d8e14 100644 --- a/Python/bootstrap_hash.c +++ b/Python/bootstrap_hash.c @@ -38,8 +38,8 @@ static int win32_urandom_init(int raise) { /* Acquire context */ - if (!CryptAcquireContext(&hCryptProv, NULL, NULL, - PROV_RSA_FULL, CRYPT_VERIFYCONTEXT)) + if (!CryptAcquireContextW(&hCryptProv, NULL, NULL, + PROV_RSA_FULL, CRYPT_VERIFYCONTEXT)) goto error; return 0; From webhook-mailer at python.org Mon May 18 13:25:15 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Mon, 18 May 2020 17:25:15 -0000 Subject: [Python-checkins] bpo-40257: Revert changes to inspect.getdoc() (GH-20073) Message-ID: https://github.com/python/cpython/commit/08b47c367a08f571a986366aa33828d3951fa88d commit: 08b47c367a08f571a986366aa33828d3951fa88d branch: master author: Serhiy Storchaka committer: GitHub date: 2020-05-18T20:25:07+03:00 summary: bpo-40257: Revert changes to inspect.getdoc() (GH-20073) files: A Misc/NEWS.d/next/Library/2020-05-13-23-10-25.bpo-40257.aR4TGp.rst M Doc/library/inspect.rst M Doc/whatsnew/3.9.rst M Lib/inspect.py M Lib/pydoc.py M Lib/test/test_inspect.py diff --git a/Doc/library/inspect.rst b/Doc/library/inspect.rst index 634645124c786..d00a30ff00406 100644 --- a/Doc/library/inspect.rst +++ b/Doc/library/inspect.rst @@ -473,15 +473,12 @@ Retrieving source code Get the documentation string for an object, cleaned up with :func:`cleandoc`. 
If the documentation string for an object is not provided and the object is - a method, a property or a descriptor, retrieve the documentation + a class, a method, a property or a descriptor, retrieve the documentation string from the inheritance hierarchy. .. versionchanged:: 3.5 Documentation strings are now inherited if not overridden. - .. versionchanged:: 3.9 - Documentation strings for classes are no longer inherited. - .. function:: getcomments(object) diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index 66e7c6461ecd7..593f523828703 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -846,11 +846,6 @@ Changes in the Python API :class:`ftplib.FTP_TLS` as a keyword-only parameter, and the default encoding is changed from Latin-1 to UTF-8 to follow :rfc:`2640`. -* :func:`inspect.getdoc` no longer returns docstring inherited from the type - of the object or from parent class if it is a class if it is not defined - in the object itself. - (Contributed by Serhiy Storchaka in :issue:`40257`.) - * :meth:`asyncio.loop.shutdown_default_executor` has been added to :class:`~asyncio.AbstractEventLoop`, meaning alternative event loops that inherit from it should have this method defined. 
diff --git a/Lib/inspect.py b/Lib/inspect.py index ad7e8cb1203e7..887a3424057b6 100644 --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -543,6 +543,17 @@ def _findclass(func): return cls def _finddoc(obj): + if isclass(obj): + for base in obj.__mro__: + if base is not object: + try: + doc = base.__doc__ + except AttributeError: + continue + if doc is not None: + return doc + return None + if ismethod(obj): name = obj.__func__.__name__ self = obj.__self__ @@ -586,35 +597,23 @@ def _finddoc(obj): return None for base in cls.__mro__: try: - doc = _getowndoc(getattr(base, name)) + doc = getattr(base, name).__doc__ except AttributeError: continue if doc is not None: return doc return None -def _getowndoc(obj): - """Get the documentation string for an object if it is not - inherited from its class.""" - try: - doc = object.__getattribute__(obj, '__doc__') - if doc is None: - return None - if obj is not type: - typedoc = type(obj).__doc__ - if isinstance(typedoc, str) and typedoc == doc: - return None - return doc - except AttributeError: - return None - def getdoc(object): """Get the documentation string for an object. All tabs are expanded to spaces. 
To clean up docstrings that are indented to line up with blocks of code, any whitespace than can be uniformly removed from the second line onwards is removed.""" - doc = _getowndoc(object) + try: + doc = object.__doc__ + except AttributeError: + return None if doc is None: try: doc = _finddoc(object) diff --git a/Lib/pydoc.py b/Lib/pydoc.py index 898cc44b295ee..628f9fc7d1d1e 100755 --- a/Lib/pydoc.py +++ b/Lib/pydoc.py @@ -90,9 +90,101 @@ def pathdirs(): normdirs.append(normdir) return dirs +def _findclass(func): + cls = sys.modules.get(func.__module__) + if cls is None: + return None + for name in func.__qualname__.split('.')[:-1]: + cls = getattr(cls, name) + if not inspect.isclass(cls): + return None + return cls + +def _finddoc(obj): + if inspect.ismethod(obj): + name = obj.__func__.__name__ + self = obj.__self__ + if (inspect.isclass(self) and + getattr(getattr(self, name, None), '__func__') is obj.__func__): + # classmethod + cls = self + else: + cls = self.__class__ + elif inspect.isfunction(obj): + name = obj.__name__ + cls = _findclass(obj) + if cls is None or getattr(cls, name) is not obj: + return None + elif inspect.isbuiltin(obj): + name = obj.__name__ + self = obj.__self__ + if (inspect.isclass(self) and + self.__qualname__ + '.' + name == obj.__qualname__): + # classmethod + cls = self + else: + cls = self.__class__ + # Should be tested before isdatadescriptor(). 
+ elif isinstance(obj, property): + func = obj.fget + name = func.__name__ + cls = _findclass(func) + if cls is None or getattr(cls, name) is not obj: + return None + elif inspect.ismethoddescriptor(obj) or inspect.isdatadescriptor(obj): + name = obj.__name__ + cls = obj.__objclass__ + if getattr(cls, name) is not obj: + return None + if inspect.ismemberdescriptor(obj): + slots = getattr(cls, '__slots__', None) + if isinstance(slots, dict) and name in slots: + return slots[name] + else: + return None + for base in cls.__mro__: + try: + doc = _getowndoc(getattr(base, name)) + except AttributeError: + continue + if doc is not None: + return doc + return None + +def _getowndoc(obj): + """Get the documentation string for an object if it is not + inherited from its class.""" + try: + doc = object.__getattribute__(obj, '__doc__') + if doc is None: + return None + if obj is not type: + typedoc = type(obj).__doc__ + if isinstance(typedoc, str) and typedoc == doc: + return None + return doc + except AttributeError: + return None + +def _getdoc(object): + """Get the documentation string for an object. + + All tabs are expanded to spaces. 
To clean up docstrings that are + indented to line up with blocks of code, any whitespace than can be + uniformly removed from the second line onwards is removed.""" + doc = _getowndoc(object) + if doc is None: + try: + doc = _finddoc(object) + except (AttributeError, TypeError): + return None + if not isinstance(doc, str): + return None + return inspect.cleandoc(doc) + def getdoc(object): """Get the doc string or comments for an object.""" - result = inspect.getdoc(object) or inspect.getcomments(object) + result = _getdoc(object) or inspect.getcomments(object) return result and re.sub('^ *\n', '', result.rstrip()) or '' def splitdoc(doc): @@ -1669,7 +1761,7 @@ def render_doc(thing, title='Python Library Documentation: %s', forceload=0, inspect.isclass(object) or inspect.isroutine(object) or inspect.isdatadescriptor(object) or - inspect.getdoc(object)): + _getdoc(object)): # If the passed object is a piece of data or an instance, # document its available methods instead of its value. if hasattr(object, '__origin__'): diff --git a/Lib/test/test_inspect.py b/Lib/test/test_inspect.py index 98a9c0a662a09..e3e2be52076c6 100644 --- a/Lib/test/test_inspect.py +++ b/Lib/test/test_inspect.py @@ -439,7 +439,8 @@ def test_getdoc(self): @unittest.skipIf(sys.flags.optimize >= 2, "Docstrings are omitted with -O2 and above") def test_getdoc_inherited(self): - self.assertIsNone(inspect.getdoc(mod.FesteringGob)) + self.assertEqual(inspect.getdoc(mod.FesteringGob), + 'A longer,\n\nindented\n\ndocstring.') self.assertEqual(inspect.getdoc(mod.FesteringGob.abuse), 'Another\n\ndocstring\n\ncontaining\n\ntabs') self.assertEqual(inspect.getdoc(mod.FesteringGob().abuse), @@ -447,20 +448,10 @@ def test_getdoc_inherited(self): self.assertEqual(inspect.getdoc(mod.FesteringGob.contradiction), 'The automatic gainsaying.') - @unittest.skipIf(MISSING_C_DOCSTRINGS, "test requires docstrings") - def test_getowndoc(self): - getowndoc = inspect._getowndoc - self.assertEqual(getowndoc(type), 
type.__doc__) - self.assertEqual(getowndoc(int), int.__doc__) - self.assertEqual(getowndoc(int.to_bytes), int.to_bytes.__doc__) - self.assertEqual(getowndoc(int().to_bytes), int.to_bytes.__doc__) - self.assertEqual(getowndoc(int.from_bytes), int.from_bytes.__doc__) - self.assertEqual(getowndoc(int.real), int.real.__doc__) - @unittest.skipIf(MISSING_C_DOCSTRINGS, "test requires docstrings") def test_finddoc(self): finddoc = inspect._finddoc - self.assertIsNone(finddoc(int)) + self.assertEqual(finddoc(int), int.__doc__) self.assertEqual(finddoc(int.to_bytes), int.to_bytes.__doc__) self.assertEqual(finddoc(int().to_bytes), int.to_bytes.__doc__) self.assertEqual(finddoc(int.from_bytes), int.from_bytes.__doc__) diff --git a/Misc/NEWS.d/next/Library/2020-05-13-23-10-25.bpo-40257.aR4TGp.rst b/Misc/NEWS.d/next/Library/2020-05-13-23-10-25.bpo-40257.aR4TGp.rst new file mode 100644 index 0000000000000..9d4037bc9aa79 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-13-23-10-25.bpo-40257.aR4TGp.rst @@ -0,0 +1 @@ +Revert changes to :func:`inspect.getdoc`. From webhook-mailer at python.org Mon May 18 13:32:11 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Mon, 18 May 2020 17:32:11 -0000 Subject: [Python-checkins] bpo-40661: Fix segfault when parsing invalid input (GH-20165) Message-ID: https://github.com/python/cpython/commit/7b7a21bc4fd063b26a2d1882fddc458861497812 commit: 7b7a21bc4fd063b26a2d1882fddc458861497812 branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-05-18T18:32:03+01:00 summary: bpo-40661: Fix segfault when parsing invalid input (GH-20165) Fix segfaults when parsing very complex invalid input, like `import ?? ?????????????????????????????\n????`. 
Co-authored-by: Guido van Rossum Co-authored-by: Pablo Galindo files: M Lib/test/test_peg_parser.py M Lib/test/test_syntax.py M Parser/pegen/parse.c M Tools/peg_generator/pegen/c_generator.py diff --git a/Lib/test/test_peg_parser.py b/Lib/test/test_peg_parser.py index 9614e45799dd8..6ccb2573176bb 100644 --- a/Lib/test/test_peg_parser.py +++ b/Lib/test/test_peg_parser.py @@ -591,6 +591,7 @@ def f(*a, b): ("f-string_single_closing_brace", "f'}'"), ("from_import_invalid", "from import import a"), ("from_import_trailing_comma", "from a import b,"), + ("import_non_ascii_syntax_error", "import ? ?"), # This test case checks error paths involving tokens with uninitialized # values of col_offset and end_col_offset. ("invalid indentation", diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py index 60c7d9fd3868e..87ceced6c62a0 100644 --- a/Lib/test/test_syntax.py +++ b/Lib/test/test_syntax.py @@ -659,6 +659,9 @@ Traceback (most recent call last): SyntaxError: cannot assign to __debug__ + >>> import ? ? + Traceback (most recent call last): + SyntaxError: invalid character '?' (U+00A3) """ import re diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index 2a9dad7d1d7ef..e9c20327c155a 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -683,6 +683,9 @@ file_rule(Parser *p) mod_ty _res = NULL; int _mark = p->mark; { // statements? 
$ + if (p->error_indicator) { + return NULL; + } void *a; Token * endmarker_var; if ( @@ -715,6 +718,9 @@ interactive_rule(Parser *p) mod_ty _res = NULL; int _mark = p->mark; { // statement_newline + if (p->error_indicator) { + return NULL; + } asdl_seq* a; if ( (a = statement_newline_rule(p)) // statement_newline @@ -744,6 +750,9 @@ eval_rule(Parser *p) mod_ty _res = NULL; int _mark = p->mark; { // expressions NEWLINE* $ + if (p->error_indicator) { + return NULL; + } asdl_seq * _loop0_1_var; expr_ty a; Token * endmarker_var; @@ -779,6 +788,9 @@ func_type_rule(Parser *p) mod_ty _res = NULL; int _mark = p->mark; { // '(' type_expressions? ')' '->' expression NEWLINE* $ + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; Token * _literal_2; @@ -826,6 +838,9 @@ fstring_rule(Parser *p) expr_ty _res = NULL; int _mark = p->mark; { // star_expressions + if (p->error_indicator) { + return NULL; + } expr_ty star_expressions_var; if ( (star_expressions_var = star_expressions_rule(p)) // star_expressions @@ -858,6 +873,9 @@ type_expressions_rule(Parser *p) asdl_seq* _res = NULL; int _mark = p->mark; { // ','.expression+ ',' '*' expression ',' '**' expression + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; Token * _literal_2; @@ -891,6 +909,9 @@ type_expressions_rule(Parser *p) p->mark = _mark; } { // ','.expression+ ',' '*' expression + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; asdl_seq * a; @@ -915,6 +936,9 @@ type_expressions_rule(Parser *p) p->mark = _mark; } { // ','.expression+ ',' '**' expression + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; asdl_seq * a; @@ -939,6 +963,9 @@ type_expressions_rule(Parser *p) p->mark = _mark; } { // '*' expression ',' '**' expression + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; Token * _literal_2; @@ -966,6 +993,9 @@ type_expressions_rule(Parser *p) 
p->mark = _mark; } { // '*' expression + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; if ( @@ -984,6 +1014,9 @@ type_expressions_rule(Parser *p) p->mark = _mark; } { // '**' expression + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; if ( @@ -1002,6 +1035,9 @@ type_expressions_rule(Parser *p) p->mark = _mark; } { // ','.expression+ + if (p->error_indicator) { + return NULL; + } asdl_seq * _gather_9_var; if ( (_gather_9_var = _gather_9_rule(p)) // ','.expression+ @@ -1027,6 +1063,9 @@ statements_rule(Parser *p) asdl_seq* _res = NULL; int _mark = p->mark; { // statement+ + if (p->error_indicator) { + return NULL; + } asdl_seq * a; if ( (a = _loop1_11_rule(p)) // statement+ @@ -1056,6 +1095,9 @@ statement_rule(Parser *p) asdl_seq* _res = NULL; int _mark = p->mark; { // compound_stmt + if (p->error_indicator) { + return NULL; + } stmt_ty a; if ( (a = compound_stmt_rule(p)) // compound_stmt @@ -1071,6 +1113,9 @@ statement_rule(Parser *p) p->mark = _mark; } { // simple_stmt + if (p->error_indicator) { + return NULL; + } asdl_seq* simple_stmt_var; if ( (simple_stmt_var = simple_stmt_rule(p)) // simple_stmt @@ -1104,6 +1149,9 @@ statement_newline_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // compound_stmt NEWLINE + if (p->error_indicator) { + return NULL; + } stmt_ty a; Token * newline_var; if ( @@ -1122,6 +1170,9 @@ statement_newline_rule(Parser *p) p->mark = _mark; } { // simple_stmt + if (p->error_indicator) { + return NULL; + } asdl_seq* simple_stmt_var; if ( (simple_stmt_var = simple_stmt_rule(p)) // simple_stmt @@ -1133,6 +1184,9 @@ statement_newline_rule(Parser *p) p->mark = _mark; } { // NEWLINE + if (p->error_indicator) { + return NULL; + } Token * newline_var; if ( (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' @@ -1156,6 +1210,9 @@ statement_newline_rule(Parser *p) p->mark = _mark; } { // $ + if 
(p->error_indicator) { + return NULL; + } Token * endmarker_var; if ( (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' @@ -1185,6 +1242,9 @@ simple_stmt_rule(Parser *p) asdl_seq* _res = NULL; int _mark = p->mark; { // small_stmt !';' NEWLINE + if (p->error_indicator) { + return NULL; + } stmt_ty a; Token * newline_var; if ( @@ -1205,6 +1265,9 @@ simple_stmt_rule(Parser *p) p->mark = _mark; } { // ';'.small_stmt+ ';'? NEWLINE + if (p->error_indicator) { + return NULL; + } void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; @@ -1264,6 +1327,9 @@ small_stmt_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // assignment + if (p->error_indicator) { + return NULL; + } stmt_ty assignment_var; if ( (assignment_var = assignment_rule(p)) // assignment @@ -1275,6 +1341,9 @@ small_stmt_rule(Parser *p) p->mark = _mark; } { // star_expressions + if (p->error_indicator) { + return NULL; + } expr_ty e; if ( (e = star_expressions_rule(p)) // star_expressions @@ -1298,6 +1367,9 @@ small_stmt_rule(Parser *p) p->mark = _mark; } { // &'return' return_stmt + if (p->error_indicator) { + return NULL; + } stmt_ty return_stmt_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 500) // token='return' @@ -1311,6 +1383,9 @@ small_stmt_rule(Parser *p) p->mark = _mark; } { // &('import' | 'from') import_stmt + if (p->error_indicator) { + return NULL; + } stmt_ty import_stmt_var; if ( _PyPegen_lookahead(1, _tmp_14_rule, p) @@ -1324,6 +1399,9 @@ small_stmt_rule(Parser *p) p->mark = _mark; } { // &'raise' raise_stmt + if (p->error_indicator) { + return NULL; + } stmt_ty raise_stmt_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 501) // token='raise' @@ -1337,6 +1415,9 @@ small_stmt_rule(Parser *p) p->mark = _mark; } { // 'pass' + if (p->error_indicator) { + return NULL; + } Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 502)) 
// token='pass' @@ -1360,6 +1441,9 @@ small_stmt_rule(Parser *p) p->mark = _mark; } { // &'del' del_stmt + if (p->error_indicator) { + return NULL; + } stmt_ty del_stmt_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 503) // token='del' @@ -1373,6 +1457,9 @@ small_stmt_rule(Parser *p) p->mark = _mark; } { // &'yield' yield_stmt + if (p->error_indicator) { + return NULL; + } stmt_ty yield_stmt_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 504) // token='yield' @@ -1386,6 +1473,9 @@ small_stmt_rule(Parser *p) p->mark = _mark; } { // &'assert' assert_stmt + if (p->error_indicator) { + return NULL; + } stmt_ty assert_stmt_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 505) // token='assert' @@ -1399,6 +1489,9 @@ small_stmt_rule(Parser *p) p->mark = _mark; } { // 'break' + if (p->error_indicator) { + return NULL; + } Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 506)) // token='break' @@ -1422,6 +1515,9 @@ small_stmt_rule(Parser *p) p->mark = _mark; } { // 'continue' + if (p->error_indicator) { + return NULL; + } Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 507)) // token='continue' @@ -1445,6 +1541,9 @@ small_stmt_rule(Parser *p) p->mark = _mark; } { // &'global' global_stmt + if (p->error_indicator) { + return NULL; + } stmt_ty global_stmt_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 508) // token='global' @@ -1458,6 +1557,9 @@ small_stmt_rule(Parser *p) p->mark = _mark; } { // &'nonlocal' nonlocal_stmt + if (p->error_indicator) { + return NULL; + } stmt_ty nonlocal_stmt_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 509) // token='nonlocal' @@ -1493,6 +1595,9 @@ compound_stmt_rule(Parser *p) stmt_ty _res = NULL; int _mark = p->mark; { // &('def' | '@' | ASYNC) function_def + if (p->error_indicator) { + return NULL; + } stmt_ty function_def_var; if ( _PyPegen_lookahead(1, _tmp_15_rule, p) @@ -1506,6 +1611,9 @@ compound_stmt_rule(Parser *p) 
p->mark = _mark; } { // &'if' if_stmt + if (p->error_indicator) { + return NULL; + } stmt_ty if_stmt_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 510) // token='if' @@ -1519,6 +1627,9 @@ compound_stmt_rule(Parser *p) p->mark = _mark; } { // &('class' | '@') class_def + if (p->error_indicator) { + return NULL; + } stmt_ty class_def_var; if ( _PyPegen_lookahead(1, _tmp_16_rule, p) @@ -1532,6 +1643,9 @@ compound_stmt_rule(Parser *p) p->mark = _mark; } { // &('with' | ASYNC) with_stmt + if (p->error_indicator) { + return NULL; + } stmt_ty with_stmt_var; if ( _PyPegen_lookahead(1, _tmp_17_rule, p) @@ -1545,6 +1659,9 @@ compound_stmt_rule(Parser *p) p->mark = _mark; } { // &('for' | ASYNC) for_stmt + if (p->error_indicator) { + return NULL; + } stmt_ty for_stmt_var; if ( _PyPegen_lookahead(1, _tmp_18_rule, p) @@ -1558,6 +1675,9 @@ compound_stmt_rule(Parser *p) p->mark = _mark; } { // &'try' try_stmt + if (p->error_indicator) { + return NULL; + } stmt_ty try_stmt_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 511) // token='try' @@ -1571,6 +1691,9 @@ compound_stmt_rule(Parser *p) p->mark = _mark; } { // &'while' while_stmt + if (p->error_indicator) { + return NULL; + } stmt_ty while_stmt_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 512) // token='while' @@ -1611,6 +1734,9 @@ assignment_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME ':' expression ['=' annotated_rhs] + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; expr_ty b; @@ -1643,6 +1769,9 @@ assignment_rule(Parser *p) p->mark = _mark; } { // ('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs] + if (p->error_indicator) { + return NULL; + } Token * _literal; void *a; expr_ty b; @@ -1675,6 +1804,9 @@ assignment_rule(Parser *p) p->mark = _mark; } { // ((star_targets '='))+ (yield_expr | 
star_expressions) TYPE_COMMENT? + if (p->error_indicator) { + return NULL; + } asdl_seq * a; void *b; void *tc; @@ -1704,6 +1836,9 @@ assignment_rule(Parser *p) p->mark = _mark; } { // single_target augassign (yield_expr | star_expressions) + if (p->error_indicator) { + return NULL; + } expr_ty a; AugOperator* b; void *c; @@ -1733,6 +1868,9 @@ assignment_rule(Parser *p) p->mark = _mark; } { // invalid_assignment + if (p->error_indicator) { + return NULL; + } void *invalid_assignment_var; if ( (invalid_assignment_var = invalid_assignment_rule(p)) // invalid_assignment @@ -1771,6 +1909,9 @@ augassign_rule(Parser *p) AugOperator* _res = NULL; int _mark = p->mark; { // '+=' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 36)) // token='+=' @@ -1786,6 +1927,9 @@ augassign_rule(Parser *p) p->mark = _mark; } { // '-=' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 37)) // token='-=' @@ -1801,6 +1945,9 @@ augassign_rule(Parser *p) p->mark = _mark; } { // '*=' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 38)) // token='*=' @@ -1816,6 +1963,9 @@ augassign_rule(Parser *p) p->mark = _mark; } { // '@=' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 50)) // token='@=' @@ -1831,6 +1981,9 @@ augassign_rule(Parser *p) p->mark = _mark; } { // '/=' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 39)) // token='/=' @@ -1846,6 +1999,9 @@ augassign_rule(Parser *p) p->mark = _mark; } { // '%=' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 40)) // token='%=' @@ -1861,6 +2017,9 @@ augassign_rule(Parser *p) p->mark = _mark; } { // '&=' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = 
_PyPegen_expect_token(p, 41)) // token='&=' @@ -1876,6 +2035,9 @@ augassign_rule(Parser *p) p->mark = _mark; } { // '|=' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 42)) // token='|=' @@ -1891,6 +2053,9 @@ augassign_rule(Parser *p) p->mark = _mark; } { // '^=' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 43)) // token='^=' @@ -1906,6 +2071,9 @@ augassign_rule(Parser *p) p->mark = _mark; } { // '<<=' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 44)) // token='<<=' @@ -1921,6 +2089,9 @@ augassign_rule(Parser *p) p->mark = _mark; } { // '>>=' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 45)) // token='>>=' @@ -1936,6 +2107,9 @@ augassign_rule(Parser *p) p->mark = _mark; } { // '**=' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 46)) // token='**=' @@ -1951,6 +2125,9 @@ augassign_rule(Parser *p) p->mark = _mark; } { // '//=' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 48)) // token='//=' @@ -1988,6 +2165,9 @@ global_stmt_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'global' ','.NAME+ + if (p->error_indicator) { + return NULL; + } Token * _keyword; asdl_seq * a; if ( @@ -2036,6 +2216,9 @@ nonlocal_stmt_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'nonlocal' ','.NAME+ + if (p->error_indicator) { + return NULL; + } Token * _keyword; asdl_seq * a; if ( @@ -2084,6 +2267,9 @@ yield_stmt_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // yield_expr + if 
(p->error_indicator) { + return NULL; + } expr_ty y; if ( (y = yield_expr_rule(p)) // yield_expr @@ -2129,6 +2315,9 @@ assert_stmt_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'assert' expression [',' expression] + if (p->error_indicator) { + return NULL; + } Token * _keyword; expr_ty a; void *b; @@ -2180,6 +2369,9 @@ del_stmt_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'del' del_targets + if (p->error_indicator) { + return NULL; + } Token * _keyword; asdl_seq* a; if ( @@ -2220,6 +2412,9 @@ import_stmt_rule(Parser *p) stmt_ty _res = NULL; int _mark = p->mark; { // import_name + if (p->error_indicator) { + return NULL; + } stmt_ty import_name_var; if ( (import_name_var = import_name_rule(p)) // import_name @@ -2231,6 +2426,9 @@ import_stmt_rule(Parser *p) p->mark = _mark; } { // import_from + if (p->error_indicator) { + return NULL; + } stmt_ty import_from_var; if ( (import_from_var = import_from_rule(p)) // import_from @@ -2264,6 +2462,9 @@ import_name_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'import' dotted_as_names + if (p->error_indicator) { + return NULL; + } Token * _keyword; asdl_seq* a; if ( @@ -2314,6 +2515,9 @@ import_from_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'from' (('.' | '...'))* dotted_name 'import' import_from_targets + if (p->error_indicator) { + return NULL; + } Token * _keyword; Token * _keyword_1; asdl_seq * a; @@ -2349,6 +2553,9 @@ import_from_rule(Parser *p) p->mark = _mark; } { // 'from' (('.' 
| '...'))+ 'import' import_from_targets + if (p->error_indicator) { + return NULL; + } Token * _keyword; Token * _keyword_1; asdl_seq * a; @@ -2395,6 +2602,9 @@ import_from_targets_rule(Parser *p) asdl_seq* _res = NULL; int _mark = p->mark; { // '(' import_from_as_names ','? ')' + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; void *_opt_var; @@ -2420,6 +2630,9 @@ import_from_targets_rule(Parser *p) p->mark = _mark; } { // import_from_as_names + if (p->error_indicator) { + return NULL; + } asdl_seq* import_from_as_names_var; if ( (import_from_as_names_var = import_from_as_names_rule(p)) // import_from_as_names @@ -2431,6 +2644,9 @@ import_from_targets_rule(Parser *p) p->mark = _mark; } { // '*' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' @@ -2460,6 +2676,9 @@ import_from_as_names_rule(Parser *p) asdl_seq* _res = NULL; int _mark = p->mark; { // ','.import_from_as_name+ + if (p->error_indicator) { + return NULL; + } asdl_seq * a; if ( (a = _gather_32_rule(p)) // ','.import_from_as_name+ @@ -2489,6 +2708,9 @@ import_from_as_name_rule(Parser *p) alias_ty _res = NULL; int _mark = p->mark; { // NAME ['as' NAME] + if (p->error_indicator) { + return NULL; + } expr_ty a; void *b; if ( @@ -2521,6 +2743,9 @@ dotted_as_names_rule(Parser *p) asdl_seq* _res = NULL; int _mark = p->mark; { // ','.dotted_as_name+ + if (p->error_indicator) { + return NULL; + } asdl_seq * a; if ( (a = _gather_35_rule(p)) // ','.dotted_as_name+ @@ -2550,6 +2775,9 @@ dotted_as_name_rule(Parser *p) alias_ty _res = NULL; int _mark = p->mark; { // dotted_name ['as' NAME] + if (p->error_indicator) { + return NULL; + } expr_ty a; void *b; if ( @@ -2607,6 +2835,9 @@ dotted_name_raw(Parser *p) expr_ty _res = NULL; int _mark = p->mark; { // dotted_name '.' 
NAME + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; expr_ty b; @@ -2628,6 +2859,9 @@ dotted_name_raw(Parser *p) p->mark = _mark; } { // NAME + if (p->error_indicator) { + return NULL; + } expr_ty name_var; if ( (name_var = _PyPegen_name_token(p)) // NAME @@ -2663,6 +2897,9 @@ if_stmt_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'if' named_expression ':' block elif_stmt + if (p->error_indicator) { + return NULL; + } Token * _keyword; Token * _literal; expr_ty a; @@ -2698,6 +2935,9 @@ if_stmt_rule(Parser *p) p->mark = _mark; } { // 'if' named_expression ':' block else_block? + if (p->error_indicator) { + return NULL; + } Token * _keyword; Token * _literal; expr_ty a; @@ -2757,6 +2997,9 @@ elif_stmt_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'elif' named_expression ':' block elif_stmt + if (p->error_indicator) { + return NULL; + } Token * _keyword; Token * _literal; expr_ty a; @@ -2792,6 +3035,9 @@ elif_stmt_rule(Parser *p) p->mark = _mark; } { // 'elif' named_expression ':' block else_block? + if (p->error_indicator) { + return NULL; + } Token * _keyword; Token * _literal; expr_ty a; @@ -2841,6 +3087,9 @@ else_block_rule(Parser *p) asdl_seq* _res = NULL; int _mark = p->mark; { // 'else' ':' block + if (p->error_indicator) { + return NULL; + } Token * _keyword; Token * _literal; asdl_seq* b; @@ -2884,6 +3133,9 @@ while_stmt_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'while' named_expression ':' block else_block? 
+ if (p->error_indicator) { + return NULL; + } Token * _keyword; Token * _literal; expr_ty a; @@ -2943,6 +3195,9 @@ for_stmt_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block? + if (p->error_indicator) { + return NULL; + } Token * _keyword; Token * _keyword_1; Token * _literal; @@ -2987,6 +3242,9 @@ for_stmt_rule(Parser *p) p->mark = _mark; } { // ASYNC 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block? + if (p->error_indicator) { + return NULL; + } Token * _keyword; Token * _keyword_1; Token * _literal; @@ -3060,6 +3318,9 @@ with_stmt_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'with' '(' ','.with_item+ ','? ')' ':' block + if (p->error_indicator) { + return NULL; + } Token * _keyword; Token * _literal; Token * _literal_1; @@ -3102,6 +3363,9 @@ with_stmt_rule(Parser *p) p->mark = _mark; } { // 'with' ','.with_item+ ':' TYPE_COMMENT? block + if (p->error_indicator) { + return NULL; + } Token * _keyword; Token * _literal; asdl_seq * a; @@ -3137,6 +3401,9 @@ with_stmt_rule(Parser *p) p->mark = _mark; } { // ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block + if (p->error_indicator) { + return NULL; + } Token * _keyword; Token * _literal; Token * _literal_1; @@ -3182,6 +3449,9 @@ with_stmt_rule(Parser *p) p->mark = _mark; } { // ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? 
block + if (p->error_indicator) { + return NULL; + } Token * _keyword; Token * _literal; asdl_seq * a; @@ -3234,6 +3504,9 @@ with_item_rule(Parser *p) withitem_ty _res = NULL; int _mark = p->mark; { // expression ['as' target] + if (p->error_indicator) { + return NULL; + } expr_ty e; void *o; if ( @@ -3276,6 +3549,9 @@ try_stmt_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'try' ':' block finally_block + if (p->error_indicator) { + return NULL; + } Token * _keyword; Token * _literal; asdl_seq* b; @@ -3308,6 +3584,9 @@ try_stmt_rule(Parser *p) p->mark = _mark; } { // 'try' ':' block except_block+ else_block? finally_block? + if (p->error_indicator) { + return NULL; + } Token * _keyword; Token * _literal; asdl_seq* b; @@ -3368,6 +3647,9 @@ except_block_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'except' expression ['as' NAME] ':' block + if (p->error_indicator) { + return NULL; + } Token * _keyword; Token * _literal; asdl_seq* b; @@ -3403,6 +3685,9 @@ except_block_rule(Parser *p) p->mark = _mark; } { // 'except' ':' block + if (p->error_indicator) { + return NULL; + } Token * _keyword; Token * _literal; asdl_seq* b; @@ -3446,6 +3731,9 @@ finally_block_rule(Parser *p) asdl_seq* _res = NULL; int _mark = p->mark; { // 'finally' ':' block + if (p->error_indicator) { + return NULL; + } Token * _keyword; Token * _literal; asdl_seq* a; @@ -3489,6 +3777,9 @@ return_stmt_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'return' star_expressions? 
+ if (p->error_indicator) { + return NULL; + } Token * _keyword; void *a; if ( @@ -3537,6 +3828,9 @@ raise_stmt_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'raise' expression ['from' expression] + if (p->error_indicator) { + return NULL; + } Token * _keyword; expr_ty a; void *b; @@ -3566,6 +3860,9 @@ raise_stmt_rule(Parser *p) p->mark = _mark; } { // 'raise' + if (p->error_indicator) { + return NULL; + } Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 501)) // token='raise' @@ -3603,6 +3900,9 @@ function_def_rule(Parser *p) stmt_ty _res = NULL; int _mark = p->mark; { // decorators function_def_raw + if (p->error_indicator) { + return NULL; + } asdl_seq* d; stmt_ty f; if ( @@ -3621,6 +3921,9 @@ function_def_rule(Parser *p) p->mark = _mark; } { // function_def_raw + if (p->error_indicator) { + return NULL; + } stmt_ty function_def_raw_var; if ( (function_def_raw_var = function_def_raw_rule(p)) // function_def_raw @@ -3656,6 +3959,9 @@ function_def_raw_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? block + if (p->error_indicator) { + return NULL; + } Token * _keyword; Token * _literal; Token * _literal_1; @@ -3703,6 +4009,9 @@ function_def_raw_rule(Parser *p) p->mark = _mark; } { // ASYNC 'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? 
block + if (p->error_indicator) { + return NULL; + } Token * _keyword; Token * _literal; Token * _literal_1; @@ -3770,6 +4079,9 @@ func_type_comment_rule(Parser *p) Token* _res = NULL; int _mark = p->mark; { // NEWLINE TYPE_COMMENT &(NEWLINE INDENT) + if (p->error_indicator) { + return NULL; + } Token * newline_var; Token * t; if ( @@ -3790,6 +4102,9 @@ func_type_comment_rule(Parser *p) p->mark = _mark; } { // invalid_double_type_comments + if (p->error_indicator) { + return NULL; + } void *invalid_double_type_comments_var; if ( (invalid_double_type_comments_var = invalid_double_type_comments_rule(p)) // invalid_double_type_comments @@ -3801,6 +4116,9 @@ func_type_comment_rule(Parser *p) p->mark = _mark; } { // TYPE_COMMENT + if (p->error_indicator) { + return NULL; + } Token * type_comment_var; if ( (type_comment_var = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' @@ -3826,6 +4144,9 @@ params_rule(Parser *p) arguments_ty _res = NULL; int _mark = p->mark; { // invalid_parameters + if (p->error_indicator) { + return NULL; + } void *invalid_parameters_var; if ( (invalid_parameters_var = invalid_parameters_rule(p)) // invalid_parameters @@ -3837,6 +4158,9 @@ params_rule(Parser *p) p->mark = _mark; } { // parameters + if (p->error_indicator) { + return NULL; + } arguments_ty parameters_var; if ( (parameters_var = parameters_rule(p)) // parameters @@ -3867,6 +4191,9 @@ parameters_rule(Parser *p) arguments_ty _res = NULL; int _mark = p->mark; { // slash_no_default param_no_default* param_with_default* star_etc? + if (p->error_indicator) { + return NULL; + } asdl_seq* a; asdl_seq * b; asdl_seq * c; @@ -3891,6 +4218,9 @@ parameters_rule(Parser *p) p->mark = _mark; } { // slash_with_default param_with_default* star_etc? + if (p->error_indicator) { + return NULL; + } SlashWithDefault* a; asdl_seq * b; void *c; @@ -3912,6 +4242,9 @@ parameters_rule(Parser *p) p->mark = _mark; } { // param_no_default+ param_with_default* star_etc? 
+ if (p->error_indicator) { + return NULL; + } asdl_seq * a; asdl_seq * b; void *c; @@ -3933,6 +4266,9 @@ parameters_rule(Parser *p) p->mark = _mark; } { // param_with_default+ star_etc? + if (p->error_indicator) { + return NULL; + } asdl_seq * a; void *b; if ( @@ -3951,6 +4287,9 @@ parameters_rule(Parser *p) p->mark = _mark; } { // star_etc + if (p->error_indicator) { + return NULL; + } StarEtc* a; if ( (a = star_etc_rule(p)) // star_etc @@ -3980,6 +4319,9 @@ slash_no_default_rule(Parser *p) asdl_seq* _res = NULL; int _mark = p->mark; { // param_no_default+ '/' ',' + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; asdl_seq * a; @@ -4001,6 +4343,9 @@ slash_no_default_rule(Parser *p) p->mark = _mark; } { // param_no_default+ '/' &')' + if (p->error_indicator) { + return NULL; + } Token * _literal; asdl_seq * a; if ( @@ -4037,6 +4382,9 @@ slash_with_default_rule(Parser *p) SlashWithDefault* _res = NULL; int _mark = p->mark; { // param_no_default* param_with_default+ '/' ',' + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; asdl_seq * a; @@ -4061,6 +4409,9 @@ slash_with_default_rule(Parser *p) p->mark = _mark; } { // param_no_default* param_with_default+ '/' &')' + if (p->error_indicator) { + return NULL; + } Token * _literal; asdl_seq * a; asdl_seq * b; @@ -4102,6 +4453,9 @@ star_etc_rule(Parser *p) StarEtc* _res = NULL; int _mark = p->mark; { // '*' param_no_default param_maybe_default* kwds? + if (p->error_indicator) { + return NULL; + } Token * _literal; arg_ty a; asdl_seq * b; @@ -4126,6 +4480,9 @@ star_etc_rule(Parser *p) p->mark = _mark; } { // '*' ',' param_maybe_default+ kwds? 
+ if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; asdl_seq * b; @@ -4150,6 +4507,9 @@ star_etc_rule(Parser *p) p->mark = _mark; } { // kwds + if (p->error_indicator) { + return NULL; + } arg_ty a; if ( (a = kwds_rule(p)) // kwds @@ -4165,6 +4525,9 @@ star_etc_rule(Parser *p) p->mark = _mark; } { // invalid_star_etc + if (p->error_indicator) { + return NULL; + } void *invalid_star_etc_var; if ( (invalid_star_etc_var = invalid_star_etc_rule(p)) // invalid_star_etc @@ -4190,6 +4553,9 @@ kwds_rule(Parser *p) arg_ty _res = NULL; int _mark = p->mark; { // '**' param_no_default + if (p->error_indicator) { + return NULL; + } Token * _literal; arg_ty a; if ( @@ -4222,6 +4588,9 @@ param_no_default_rule(Parser *p) arg_ty _res = NULL; int _mark = p->mark; { // param ',' TYPE_COMMENT? + if (p->error_indicator) { + return NULL; + } Token * _literal; arg_ty a; void *tc; @@ -4243,6 +4612,9 @@ param_no_default_rule(Parser *p) p->mark = _mark; } { // param TYPE_COMMENT? &')' + if (p->error_indicator) { + return NULL; + } arg_ty a; void *tc; if ( @@ -4277,6 +4649,9 @@ param_with_default_rule(Parser *p) NameDefaultPair* _res = NULL; int _mark = p->mark; { // param default ',' TYPE_COMMENT? + if (p->error_indicator) { + return NULL; + } Token * _literal; arg_ty a; expr_ty c; @@ -4301,6 +4676,9 @@ param_with_default_rule(Parser *p) p->mark = _mark; } { // param default TYPE_COMMENT? &')' + if (p->error_indicator) { + return NULL; + } arg_ty a; expr_ty c; void *tc; @@ -4340,6 +4718,9 @@ param_maybe_default_rule(Parser *p) NameDefaultPair* _res = NULL; int _mark = p->mark; { // param default? ',' TYPE_COMMENT? + if (p->error_indicator) { + return NULL; + } Token * _literal; arg_ty a; void *c; @@ -4364,6 +4745,9 @@ param_maybe_default_rule(Parser *p) p->mark = _mark; } { // param default? TYPE_COMMENT? 
&')' + if (p->error_indicator) { + return NULL; + } arg_ty a; void *c; void *tc; @@ -4409,6 +4793,9 @@ param_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME annotation? + if (p->error_indicator) { + return NULL; + } expr_ty a; void *b; if ( @@ -4449,6 +4836,9 @@ annotation_rule(Parser *p) expr_ty _res = NULL; int _mark = p->mark; { // ':' expression + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; if ( @@ -4481,6 +4871,9 @@ default_rule(Parser *p) expr_ty _res = NULL; int _mark = p->mark; { // '=' expression + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; if ( @@ -4513,6 +4906,9 @@ decorators_rule(Parser *p) asdl_seq* _res = NULL; int _mark = p->mark; { // (('@' named_expression NEWLINE))+ + if (p->error_indicator) { + return NULL; + } asdl_seq * a; if ( (a = _loop1_67_rule(p)) // (('@' named_expression NEWLINE))+ @@ -4542,6 +4938,9 @@ class_def_rule(Parser *p) stmt_ty _res = NULL; int _mark = p->mark; { // decorators class_def_raw + if (p->error_indicator) { + return NULL; + } asdl_seq* a; stmt_ty b; if ( @@ -4560,6 +4959,9 @@ class_def_rule(Parser *p) p->mark = _mark; } { // class_def_raw + if (p->error_indicator) { + return NULL; + } stmt_ty class_def_raw_var; if ( (class_def_raw_var = class_def_raw_rule(p)) // class_def_raw @@ -4593,6 +4995,9 @@ class_def_raw_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'class' NAME ['(' arguments? 
')'] ':' block + if (p->error_indicator) { + return NULL; + } Token * _keyword; Token * _literal; expr_ty a; @@ -4644,6 +5049,9 @@ block_rule(Parser *p) return _res; int _mark = p->mark; { // NEWLINE INDENT statements DEDENT + if (p->error_indicator) { + return NULL; + } asdl_seq* a; Token * dedent_var; Token * indent_var; @@ -4668,6 +5076,9 @@ block_rule(Parser *p) p->mark = _mark; } { // simple_stmt + if (p->error_indicator) { + return NULL; + } asdl_seq* simple_stmt_var; if ( (simple_stmt_var = simple_stmt_rule(p)) // simple_stmt @@ -4679,6 +5090,9 @@ block_rule(Parser *p) p->mark = _mark; } { // invalid_block + if (p->error_indicator) { + return NULL; + } void *invalid_block_var; if ( (invalid_block_var = invalid_block_rule(p)) // invalid_block @@ -4705,6 +5119,9 @@ expressions_list_rule(Parser *p) asdl_seq* _res = NULL; int _mark = p->mark; { // ','.star_expression+ ','? + if (p->error_indicator) { + return NULL; + } void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; @@ -4749,6 +5166,9 @@ star_expressions_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // star_expression ((',' star_expression))+ ','? 
+ if (p->error_indicator) { + return NULL; + } void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty a; @@ -4779,6 +5199,9 @@ star_expressions_rule(Parser *p) p->mark = _mark; } { // star_expression ',' + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; if ( @@ -4805,6 +5228,9 @@ star_expressions_rule(Parser *p) p->mark = _mark; } { // star_expression + if (p->error_indicator) { + return NULL; + } expr_ty star_expression_var; if ( (star_expression_var = star_expression_rule(p)) // star_expression @@ -4840,6 +5266,9 @@ star_expression_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // '*' bitwise_or + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; if ( @@ -4866,6 +5295,9 @@ star_expression_rule(Parser *p) p->mark = _mark; } { // expression + if (p->error_indicator) { + return NULL; + } expr_ty expression_var; if ( (expression_var = expression_rule(p)) // expression @@ -4892,6 +5324,9 @@ star_named_expressions_rule(Parser *p) asdl_seq* _res = NULL; int _mark = p->mark; { // ','.star_named_expression+ ','? 
+ if (p->error_indicator) { + return NULL; + } void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; @@ -4933,6 +5368,9 @@ star_named_expression_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // '*' bitwise_or + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; if ( @@ -4959,6 +5397,9 @@ star_named_expression_rule(Parser *p) p->mark = _mark; } { // named_expression + if (p->error_indicator) { + return NULL; + } expr_ty named_expression_var; if ( (named_expression_var = named_expression_rule(p)) // named_expression @@ -4992,6 +5433,9 @@ named_expression_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME ':=' expression + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; expr_ty b; @@ -5021,6 +5465,9 @@ named_expression_rule(Parser *p) p->mark = _mark; } { // expression !':=' + if (p->error_indicator) { + return NULL; + } expr_ty expression_var; if ( (expression_var = expression_rule(p)) // expression @@ -5034,6 +5481,9 @@ named_expression_rule(Parser *p) p->mark = _mark; } { // invalid_named_expression + if (p->error_indicator) { + return NULL; + } void *invalid_named_expression_var; if ( (invalid_named_expression_var = invalid_named_expression_rule(p)) // invalid_named_expression @@ -5059,6 +5509,9 @@ annotated_rhs_rule(Parser *p) expr_ty _res = NULL; int _mark = p->mark; { // yield_expr + if (p->error_indicator) { + return NULL; + } expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr @@ -5070,6 +5523,9 @@ annotated_rhs_rule(Parser *p) p->mark = _mark; } { // star_expressions + if (p->error_indicator) { + return NULL; + } expr_ty star_expressions_var; if ( (star_expressions_var = star_expressions_rule(p)) // star_expressions @@ -5103,6 +5559,9 @@ expressions_rule(Parser *p) int _start_col_offset = 
p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // expression ((',' expression))+ ','? + if (p->error_indicator) { + return NULL; + } void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty a; @@ -5133,6 +5592,9 @@ expressions_rule(Parser *p) p->mark = _mark; } { // expression ',' + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; if ( @@ -5159,6 +5621,9 @@ expressions_rule(Parser *p) p->mark = _mark; } { // expression + if (p->error_indicator) { + return NULL; + } expr_ty expression_var; if ( (expression_var = expression_rule(p)) // expression @@ -5194,6 +5659,9 @@ expression_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // disjunction 'if' disjunction 'else' expression + if (p->error_indicator) { + return NULL; + } Token * _keyword; Token * _keyword_1; expr_ty a; @@ -5229,6 +5697,9 @@ expression_rule(Parser *p) p->mark = _mark; } { // disjunction + if (p->error_indicator) { + return NULL; + } expr_ty disjunction_var; if ( (disjunction_var = disjunction_rule(p)) // disjunction @@ -5240,6 +5711,9 @@ expression_rule(Parser *p) p->mark = _mark; } { // lambdef + if (p->error_indicator) { + return NULL; + } expr_ty lambdef_var; if ( (lambdef_var = lambdef_rule(p)) // lambdef @@ -5274,6 +5748,9 @@ lambdef_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'lambda' lambda_parameters? ':' expression + if (p->error_indicator) { + return NULL; + } Token * _keyword; Token * _literal; void *a; @@ -5325,6 +5802,9 @@ lambda_parameters_rule(Parser *p) arguments_ty _res = NULL; int _mark = p->mark; { // lambda_slash_no_default lambda_param_no_default* lambda_param_with_default* lambda_star_etc? 
+ if (p->error_indicator) { + return NULL; + } asdl_seq* a; asdl_seq * b; asdl_seq * c; @@ -5349,6 +5829,9 @@ lambda_parameters_rule(Parser *p) p->mark = _mark; } { // lambda_slash_with_default lambda_param_with_default* lambda_star_etc? + if (p->error_indicator) { + return NULL; + } SlashWithDefault* a; asdl_seq * b; void *c; @@ -5370,6 +5853,9 @@ lambda_parameters_rule(Parser *p) p->mark = _mark; } { // lambda_param_no_default+ lambda_param_with_default* lambda_star_etc? + if (p->error_indicator) { + return NULL; + } asdl_seq * a; asdl_seq * b; void *c; @@ -5391,6 +5877,9 @@ lambda_parameters_rule(Parser *p) p->mark = _mark; } { // lambda_param_with_default+ lambda_star_etc? + if (p->error_indicator) { + return NULL; + } asdl_seq * a; void *b; if ( @@ -5409,6 +5898,9 @@ lambda_parameters_rule(Parser *p) p->mark = _mark; } { // lambda_star_etc + if (p->error_indicator) { + return NULL; + } StarEtc* a; if ( (a = lambda_star_etc_rule(p)) // lambda_star_etc @@ -5440,6 +5932,9 @@ lambda_slash_no_default_rule(Parser *p) asdl_seq* _res = NULL; int _mark = p->mark; { // lambda_param_no_default+ '/' ',' + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; asdl_seq * a; @@ -5461,6 +5956,9 @@ lambda_slash_no_default_rule(Parser *p) p->mark = _mark; } { // lambda_param_no_default+ '/' &':' + if (p->error_indicator) { + return NULL; + } Token * _literal; asdl_seq * a; if ( @@ -5497,6 +5995,9 @@ lambda_slash_with_default_rule(Parser *p) SlashWithDefault* _res = NULL; int _mark = p->mark; { // lambda_param_no_default* lambda_param_with_default+ '/' ',' + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; asdl_seq * a; @@ -5521,6 +6022,9 @@ lambda_slash_with_default_rule(Parser *p) p->mark = _mark; } { // lambda_param_no_default* lambda_param_with_default+ '/' &':' + if (p->error_indicator) { + return NULL; + } Token * _literal; asdl_seq * a; asdl_seq * b; @@ -5562,6 +6066,9 @@ lambda_star_etc_rule(Parser *p) 
StarEtc* _res = NULL; int _mark = p->mark; { // '*' lambda_param_no_default lambda_param_maybe_default* lambda_kwds? + if (p->error_indicator) { + return NULL; + } Token * _literal; arg_ty a; asdl_seq * b; @@ -5586,6 +6093,9 @@ lambda_star_etc_rule(Parser *p) p->mark = _mark; } { // '*' ',' lambda_param_maybe_default+ lambda_kwds? + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; asdl_seq * b; @@ -5610,6 +6120,9 @@ lambda_star_etc_rule(Parser *p) p->mark = _mark; } { // lambda_kwds + if (p->error_indicator) { + return NULL; + } arg_ty a; if ( (a = lambda_kwds_rule(p)) // lambda_kwds @@ -5625,6 +6138,9 @@ lambda_star_etc_rule(Parser *p) p->mark = _mark; } { // invalid_lambda_star_etc + if (p->error_indicator) { + return NULL; + } void *invalid_lambda_star_etc_var; if ( (invalid_lambda_star_etc_var = invalid_lambda_star_etc_rule(p)) // invalid_lambda_star_etc @@ -5650,6 +6166,9 @@ lambda_kwds_rule(Parser *p) arg_ty _res = NULL; int _mark = p->mark; { // '**' lambda_param_no_default + if (p->error_indicator) { + return NULL; + } Token * _literal; arg_ty a; if ( @@ -5682,6 +6201,9 @@ lambda_param_no_default_rule(Parser *p) arg_ty _res = NULL; int _mark = p->mark; { // lambda_param ',' + if (p->error_indicator) { + return NULL; + } Token * _literal; arg_ty a; if ( @@ -5700,6 +6222,9 @@ lambda_param_no_default_rule(Parser *p) p->mark = _mark; } { // lambda_param &':' + if (p->error_indicator) { + return NULL; + } arg_ty a; if ( (a = lambda_param_rule(p)) // lambda_param @@ -5731,6 +6256,9 @@ lambda_param_with_default_rule(Parser *p) NameDefaultPair* _res = NULL; int _mark = p->mark; { // lambda_param default ',' + if (p->error_indicator) { + return NULL; + } Token * _literal; arg_ty a; expr_ty c; @@ -5752,6 +6280,9 @@ lambda_param_with_default_rule(Parser *p) p->mark = _mark; } { // lambda_param default &':' + if (p->error_indicator) { + return NULL; + } arg_ty a; expr_ty c; if ( @@ -5786,6 +6317,9 @@ 
lambda_param_maybe_default_rule(Parser *p) NameDefaultPair* _res = NULL; int _mark = p->mark; { // lambda_param default? ',' + if (p->error_indicator) { + return NULL; + } Token * _literal; arg_ty a; void *c; @@ -5807,6 +6341,9 @@ lambda_param_maybe_default_rule(Parser *p) p->mark = _mark; } { // lambda_param default? &':' + if (p->error_indicator) { + return NULL; + } arg_ty a; void *c; if ( @@ -5849,6 +6386,9 @@ lambda_param_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME + if (p->error_indicator) { + return NULL; + } expr_ty a; if ( (a = _PyPegen_name_token(p)) // NAME @@ -5896,6 +6436,9 @@ disjunction_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // conjunction (('or' conjunction))+ + if (p->error_indicator) { + return NULL; + } expr_ty a; asdl_seq * b; if ( @@ -5922,6 +6465,9 @@ disjunction_rule(Parser *p) p->mark = _mark; } { // conjunction + if (p->error_indicator) { + return NULL; + } expr_ty conjunction_var; if ( (conjunction_var = conjunction_rule(p)) // conjunction @@ -5958,6 +6504,9 @@ conjunction_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // inversion (('and' inversion))+ + if (p->error_indicator) { + return NULL; + } expr_ty a; asdl_seq * b; if ( @@ -5984,6 +6533,9 @@ conjunction_rule(Parser *p) p->mark = _mark; } { // inversion + if (p->error_indicator) { + return NULL; + } expr_ty inversion_var; if ( (inversion_var = inversion_rule(p)) // inversion @@ -6020,6 +6572,9 @@ inversion_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'not' inversion + if (p->error_indicator) { + return NULL; + } Token * _keyword; expr_ty a; if ( @@ -6046,6 +6601,9 @@ inversion_rule(Parser *p) p->mark = _mark; } { // comparison + if 
(p->error_indicator) { + return NULL; + } expr_ty comparison_var; if ( (comparison_var = comparison_rule(p)) // comparison @@ -6080,6 +6638,9 @@ comparison_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // bitwise_or compare_op_bitwise_or_pair+ + if (p->error_indicator) { + return NULL; + } expr_ty a; asdl_seq * b; if ( @@ -6106,6 +6667,9 @@ comparison_rule(Parser *p) p->mark = _mark; } { // bitwise_or + if (p->error_indicator) { + return NULL; + } expr_ty bitwise_or_var; if ( (bitwise_or_var = bitwise_or_rule(p)) // bitwise_or @@ -6141,6 +6705,9 @@ compare_op_bitwise_or_pair_rule(Parser *p) CmpopExprPair* _res = NULL; int _mark = p->mark; { // eq_bitwise_or + if (p->error_indicator) { + return NULL; + } CmpopExprPair* eq_bitwise_or_var; if ( (eq_bitwise_or_var = eq_bitwise_or_rule(p)) // eq_bitwise_or @@ -6152,6 +6719,9 @@ compare_op_bitwise_or_pair_rule(Parser *p) p->mark = _mark; } { // noteq_bitwise_or + if (p->error_indicator) { + return NULL; + } CmpopExprPair* noteq_bitwise_or_var; if ( (noteq_bitwise_or_var = noteq_bitwise_or_rule(p)) // noteq_bitwise_or @@ -6163,6 +6733,9 @@ compare_op_bitwise_or_pair_rule(Parser *p) p->mark = _mark; } { // lte_bitwise_or + if (p->error_indicator) { + return NULL; + } CmpopExprPair* lte_bitwise_or_var; if ( (lte_bitwise_or_var = lte_bitwise_or_rule(p)) // lte_bitwise_or @@ -6174,6 +6747,9 @@ compare_op_bitwise_or_pair_rule(Parser *p) p->mark = _mark; } { // lt_bitwise_or + if (p->error_indicator) { + return NULL; + } CmpopExprPair* lt_bitwise_or_var; if ( (lt_bitwise_or_var = lt_bitwise_or_rule(p)) // lt_bitwise_or @@ -6185,6 +6761,9 @@ compare_op_bitwise_or_pair_rule(Parser *p) p->mark = _mark; } { // gte_bitwise_or + if (p->error_indicator) { + return NULL; + } CmpopExprPair* gte_bitwise_or_var; if ( (gte_bitwise_or_var = gte_bitwise_or_rule(p)) // gte_bitwise_or @@ -6196,6 +6775,9 @@ compare_op_bitwise_or_pair_rule(Parser *p) p->mark = 
_mark; } { // gt_bitwise_or + if (p->error_indicator) { + return NULL; + } CmpopExprPair* gt_bitwise_or_var; if ( (gt_bitwise_or_var = gt_bitwise_or_rule(p)) // gt_bitwise_or @@ -6207,6 +6789,9 @@ compare_op_bitwise_or_pair_rule(Parser *p) p->mark = _mark; } { // notin_bitwise_or + if (p->error_indicator) { + return NULL; + } CmpopExprPair* notin_bitwise_or_var; if ( (notin_bitwise_or_var = notin_bitwise_or_rule(p)) // notin_bitwise_or @@ -6218,6 +6803,9 @@ compare_op_bitwise_or_pair_rule(Parser *p) p->mark = _mark; } { // in_bitwise_or + if (p->error_indicator) { + return NULL; + } CmpopExprPair* in_bitwise_or_var; if ( (in_bitwise_or_var = in_bitwise_or_rule(p)) // in_bitwise_or @@ -6229,6 +6817,9 @@ compare_op_bitwise_or_pair_rule(Parser *p) p->mark = _mark; } { // isnot_bitwise_or + if (p->error_indicator) { + return NULL; + } CmpopExprPair* isnot_bitwise_or_var; if ( (isnot_bitwise_or_var = isnot_bitwise_or_rule(p)) // isnot_bitwise_or @@ -6240,6 +6831,9 @@ compare_op_bitwise_or_pair_rule(Parser *p) p->mark = _mark; } { // is_bitwise_or + if (p->error_indicator) { + return NULL; + } CmpopExprPair* is_bitwise_or_var; if ( (is_bitwise_or_var = is_bitwise_or_rule(p)) // is_bitwise_or @@ -6265,6 +6859,9 @@ eq_bitwise_or_rule(Parser *p) CmpopExprPair* _res = NULL; int _mark = p->mark; { // '==' bitwise_or + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; if ( @@ -6297,6 +6894,9 @@ noteq_bitwise_or_rule(Parser *p) CmpopExprPair* _res = NULL; int _mark = p->mark; { // ('!=') bitwise_or + if (p->error_indicator) { + return NULL; + } void *_tmp_92_var; expr_ty a; if ( @@ -6329,6 +6929,9 @@ lte_bitwise_or_rule(Parser *p) CmpopExprPair* _res = NULL; int _mark = p->mark; { // '<=' bitwise_or + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; if ( @@ -6361,6 +6964,9 @@ lt_bitwise_or_rule(Parser *p) CmpopExprPair* _res = NULL; int _mark = p->mark; { // '<' bitwise_or + if (p->error_indicator) { + return NULL; + } Token * 
_literal; expr_ty a; if ( @@ -6393,6 +6999,9 @@ gte_bitwise_or_rule(Parser *p) CmpopExprPair* _res = NULL; int _mark = p->mark; { // '>=' bitwise_or + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; if ( @@ -6425,6 +7034,9 @@ gt_bitwise_or_rule(Parser *p) CmpopExprPair* _res = NULL; int _mark = p->mark; { // '>' bitwise_or + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; if ( @@ -6457,6 +7069,9 @@ notin_bitwise_or_rule(Parser *p) CmpopExprPair* _res = NULL; int _mark = p->mark; { // 'not' 'in' bitwise_or + if (p->error_indicator) { + return NULL; + } Token * _keyword; Token * _keyword_1; expr_ty a; @@ -6492,6 +7107,9 @@ in_bitwise_or_rule(Parser *p) CmpopExprPair* _res = NULL; int _mark = p->mark; { // 'in' bitwise_or + if (p->error_indicator) { + return NULL; + } Token * _keyword; expr_ty a; if ( @@ -6524,6 +7142,9 @@ isnot_bitwise_or_rule(Parser *p) CmpopExprPair* _res = NULL; int _mark = p->mark; { // 'is' 'not' bitwise_or + if (p->error_indicator) { + return NULL; + } Token * _keyword; Token * _keyword_1; expr_ty a; @@ -6559,6 +7180,9 @@ is_bitwise_or_rule(Parser *p) CmpopExprPair* _res = NULL; int _mark = p->mark; { // 'is' bitwise_or + if (p->error_indicator) { + return NULL; + } Token * _keyword; expr_ty a; if ( @@ -6624,6 +7248,9 @@ bitwise_or_raw(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // bitwise_or '|' bitwise_xor + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; expr_ty b; @@ -6653,6 +7280,9 @@ bitwise_or_raw(Parser *p) p->mark = _mark; } { // bitwise_xor + if (p->error_indicator) { + return NULL; + } expr_ty bitwise_xor_var; if ( (bitwise_xor_var = bitwise_xor_rule(p)) // bitwise_xor @@ -6711,6 +7341,9 @@ bitwise_xor_raw(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // bitwise_xor '^' bitwise_and + if 
(p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; expr_ty b; @@ -6740,6 +7373,9 @@ bitwise_xor_raw(Parser *p) p->mark = _mark; } { // bitwise_and + if (p->error_indicator) { + return NULL; + } expr_ty bitwise_and_var; if ( (bitwise_and_var = bitwise_and_rule(p)) // bitwise_and @@ -6798,6 +7434,9 @@ bitwise_and_raw(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // bitwise_and '&' shift_expr + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; expr_ty b; @@ -6827,6 +7466,9 @@ bitwise_and_raw(Parser *p) p->mark = _mark; } { // shift_expr + if (p->error_indicator) { + return NULL; + } expr_ty shift_expr_var; if ( (shift_expr_var = shift_expr_rule(p)) // shift_expr @@ -6885,6 +7527,9 @@ shift_expr_raw(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // shift_expr '<<' sum + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; expr_ty b; @@ -6914,6 +7559,9 @@ shift_expr_raw(Parser *p) p->mark = _mark; } { // shift_expr '>>' sum + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; expr_ty b; @@ -6943,6 +7591,9 @@ shift_expr_raw(Parser *p) p->mark = _mark; } { // sum + if (p->error_indicator) { + return NULL; + } expr_ty sum_var; if ( (sum_var = sum_rule(p)) // sum @@ -7001,6 +7652,9 @@ sum_raw(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // sum '+' term + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; expr_ty b; @@ -7030,6 +7684,9 @@ sum_raw(Parser *p) p->mark = _mark; } { // sum '-' term + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; expr_ty b; @@ -7059,6 +7716,9 @@ sum_raw(Parser *p) p->mark = _mark; } { // term + if (p->error_indicator) { + return NULL; + } expr_ty term_var; if ( (term_var = term_rule(p)) 
// term @@ -7123,6 +7783,9 @@ term_raw(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // term '*' factor + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; expr_ty b; @@ -7152,6 +7815,9 @@ term_raw(Parser *p) p->mark = _mark; } { // term '/' factor + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; expr_ty b; @@ -7181,6 +7847,9 @@ term_raw(Parser *p) p->mark = _mark; } { // term '//' factor + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; expr_ty b; @@ -7210,6 +7879,9 @@ term_raw(Parser *p) p->mark = _mark; } { // term '%' factor + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; expr_ty b; @@ -7239,6 +7911,9 @@ term_raw(Parser *p) p->mark = _mark; } { // term '@' factor + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; expr_ty b; @@ -7268,6 +7943,9 @@ term_raw(Parser *p) p->mark = _mark; } { // factor + if (p->error_indicator) { + return NULL; + } expr_ty factor_var; if ( (factor_var = factor_rule(p)) // factor @@ -7303,6 +7981,9 @@ factor_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // '+' factor + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; if ( @@ -7329,6 +8010,9 @@ factor_rule(Parser *p) p->mark = _mark; } { // '-' factor + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; if ( @@ -7355,6 +8039,9 @@ factor_rule(Parser *p) p->mark = _mark; } { // '~' factor + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; if ( @@ -7381,6 +8068,9 @@ factor_rule(Parser *p) p->mark = _mark; } { // power + if (p->error_indicator) { + return NULL; + } expr_ty power_var; if ( (power_var = power_rule(p)) // power @@ -7415,6 +8105,9 @@ power_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; 
UNUSED(_start_col_offset); // Only used by EXTRA macro { // await_primary '**' factor + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; expr_ty b; @@ -7444,6 +8137,9 @@ power_rule(Parser *p) p->mark = _mark; } { // await_primary + if (p->error_indicator) { + return NULL; + } expr_ty await_primary_var; if ( (await_primary_var = await_primary_rule(p)) // await_primary @@ -7479,6 +8175,9 @@ await_primary_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // AWAIT primary + if (p->error_indicator) { + return NULL; + } expr_ty a; Token * await_var; if ( @@ -7505,6 +8204,9 @@ await_primary_rule(Parser *p) p->mark = _mark; } { // primary + if (p->error_indicator) { + return NULL; + } expr_ty primary_var; if ( (primary_var = primary_rule(p)) // primary @@ -7569,6 +8271,9 @@ primary_raw(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // primary '.' NAME + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; expr_ty b; @@ -7598,6 +8303,9 @@ primary_raw(Parser *p) p->mark = _mark; } { // primary genexp + if (p->error_indicator) { + return NULL; + } expr_ty a; expr_ty b; if ( @@ -7624,6 +8332,9 @@ primary_raw(Parser *p) p->mark = _mark; } { // primary '(' arguments? 
')' + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; expr_ty a; @@ -7656,6 +8367,9 @@ primary_raw(Parser *p) p->mark = _mark; } { // primary '[' slices ']' + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; expr_ty a; @@ -7688,6 +8402,9 @@ primary_raw(Parser *p) p->mark = _mark; } { // atom + if (p->error_indicator) { + return NULL; + } expr_ty atom_var; if ( (atom_var = atom_rule(p)) // atom @@ -7721,6 +8438,9 @@ slices_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // slice !',' + if (p->error_indicator) { + return NULL; + } expr_ty a; if ( (a = slice_rule(p)) // slice @@ -7738,6 +8458,9 @@ slices_rule(Parser *p) p->mark = _mark; } { // ','.slice+ ','? + if (p->error_indicator) { + return NULL; + } void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; @@ -7787,6 +8510,9 @@ slice_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // expression? ':' expression? [':' expression?] 
+ if (p->error_indicator) { + return NULL; + } Token * _literal; void *a; void *b; @@ -7819,6 +8545,9 @@ slice_rule(Parser *p) p->mark = _mark; } { // expression + if (p->error_indicator) { + return NULL; + } expr_ty a; if ( (a = expression_rule(p)) // expression @@ -7867,6 +8596,9 @@ atom_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME + if (p->error_indicator) { + return NULL; + } expr_ty name_var; if ( (name_var = _PyPegen_name_token(p)) // NAME @@ -7878,6 +8610,9 @@ atom_rule(Parser *p) p->mark = _mark; } { // 'True' + if (p->error_indicator) { + return NULL; + } Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 527)) // token='True' @@ -7901,6 +8636,9 @@ atom_rule(Parser *p) p->mark = _mark; } { // 'False' + if (p->error_indicator) { + return NULL; + } Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 528)) // token='False' @@ -7924,6 +8662,9 @@ atom_rule(Parser *p) p->mark = _mark; } { // 'None' + if (p->error_indicator) { + return NULL; + } Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 529)) // token='None' @@ -7947,6 +8688,9 @@ atom_rule(Parser *p) p->mark = _mark; } { // '__new_parser__' + if (p->error_indicator) { + return NULL; + } Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 530)) // token='__new_parser__' @@ -7962,6 +8706,9 @@ atom_rule(Parser *p) p->mark = _mark; } { // &STRING strings + if (p->error_indicator) { + return NULL; + } expr_ty strings_var; if ( _PyPegen_lookahead(1, _PyPegen_string_token, p) @@ -7975,6 +8722,9 @@ atom_rule(Parser *p) p->mark = _mark; } { // NUMBER + if (p->error_indicator) { + return NULL; + } expr_ty number_var; if ( (number_var = _PyPegen_number_token(p)) // NUMBER @@ -7986,6 +8736,9 @@ atom_rule(Parser *p) p->mark = _mark; } { // &'(' (tuple | group | genexp) + if (p->error_indicator) { + return NULL; + } void *_tmp_96_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 
7) // token='(' @@ -7999,6 +8752,9 @@ atom_rule(Parser *p) p->mark = _mark; } { // &'[' (list | listcomp) + if (p->error_indicator) { + return NULL; + } void *_tmp_97_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 9) // token='[' @@ -8012,6 +8768,9 @@ atom_rule(Parser *p) p->mark = _mark; } { // &'{' (dict | set | dictcomp | setcomp) + if (p->error_indicator) { + return NULL; + } void *_tmp_98_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 25) // token='{' @@ -8025,6 +8784,9 @@ atom_rule(Parser *p) p->mark = _mark; } { // '...' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 52)) // token='...' @@ -8064,6 +8826,9 @@ strings_rule(Parser *p) return _res; int _mark = p->mark; { // STRING+ + if (p->error_indicator) { + return NULL; + } asdl_seq * a; if ( (a = _loop1_99_rule(p)) // STRING+ @@ -8102,6 +8867,9 @@ list_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // '[' star_named_expressions? ']' + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; void *a; @@ -8153,6 +8921,9 @@ listcomp_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // '[' named_expression for_if_clauses ']' + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; expr_ty a; @@ -8185,6 +8956,9 @@ listcomp_rule(Parser *p) p->mark = _mark; } { // invalid_comprehension + if (p->error_indicator) { + return NULL; + } void *invalid_comprehension_var; if ( (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension @@ -8218,6 +8992,9 @@ tuple_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // '(' [star_named_expression ',' star_named_expressions?] 
')' + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; void *a; @@ -8261,6 +9038,9 @@ group_rule(Parser *p) expr_ty _res = NULL; int _mark = p->mark; { // '(' (yield_expr | named_expression) ')' + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; void *a; @@ -8304,6 +9084,9 @@ genexp_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // '(' expression for_if_clauses ')' + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; expr_ty a; @@ -8336,6 +9119,9 @@ genexp_rule(Parser *p) p->mark = _mark; } { // invalid_comprehension + if (p->error_indicator) { + return NULL; + } void *invalid_comprehension_var; if ( (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension @@ -8369,6 +9155,9 @@ set_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // '{' expressions_list '}' + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; asdl_seq* a; @@ -8420,6 +9209,9 @@ setcomp_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // '{' expression for_if_clauses '}' + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; expr_ty a; @@ -8452,6 +9244,9 @@ setcomp_rule(Parser *p) p->mark = _mark; } { // invalid_comprehension + if (p->error_indicator) { + return NULL; + } void *invalid_comprehension_var; if ( (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension @@ -8485,6 +9280,9 @@ dict_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // '{' kvpairs? 
'}' + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; void *a; @@ -8536,6 +9334,9 @@ dictcomp_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // '{' kvpair for_if_clauses '}' + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; KeyValuePair* a; @@ -8582,6 +9383,9 @@ kvpairs_rule(Parser *p) asdl_seq* _res = NULL; int _mark = p->mark; { // ','.kvpair+ ','? + if (p->error_indicator) { + return NULL; + } void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; @@ -8615,6 +9419,9 @@ kvpair_rule(Parser *p) KeyValuePair* _res = NULL; int _mark = p->mark; { // '**' bitwise_or + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; if ( @@ -8633,6 +9440,9 @@ kvpair_rule(Parser *p) p->mark = _mark; } { // expression ':' expression + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; expr_ty b; @@ -8668,6 +9478,9 @@ for_if_clauses_rule(Parser *p) asdl_seq* _res = NULL; int _mark = p->mark; { // for_if_clause+ + if (p->error_indicator) { + return NULL; + } asdl_seq * _loop1_104_var; if ( (_loop1_104_var = _loop1_104_rule(p)) // for_if_clause+ @@ -8695,6 +9508,9 @@ for_if_clause_rule(Parser *p) comprehension_ty _res = NULL; int _mark = p->mark; { // ASYNC 'for' star_targets 'in' disjunction (('if' disjunction))* + if (p->error_indicator) { + return NULL; + } Token * _keyword; Token * _keyword_1; expr_ty a; @@ -8725,6 +9541,9 @@ for_if_clause_rule(Parser *p) p->mark = _mark; } { // 'for' star_targets 'in' disjunction (('if' disjunction))* + if (p->error_indicator) { + return NULL; + } Token * _keyword; Token * _keyword_1; expr_ty a; @@ -8774,6 +9593,9 @@ yield_expr_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'yield' 'from' expression + if (p->error_indicator) { + return NULL; + } Token * 
_keyword; Token * _keyword_1; expr_ty a; @@ -8803,6 +9625,9 @@ yield_expr_rule(Parser *p) p->mark = _mark; } { // 'yield' star_expressions? + if (p->error_indicator) { + return NULL; + } Token * _keyword; void *a; if ( @@ -8845,6 +9670,9 @@ arguments_rule(Parser *p) return _res; int _mark = p->mark; { // args ','? &')' + if (p->error_indicator) { + return NULL; + } void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty a; @@ -8866,6 +9694,9 @@ arguments_rule(Parser *p) p->mark = _mark; } { // incorrect_arguments + if (p->error_indicator) { + return NULL; + } void *incorrect_arguments_var; if ( (incorrect_arguments_var = incorrect_arguments_rule(p)) // incorrect_arguments @@ -8900,6 +9731,9 @@ args_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // starred_expression [',' args] + if (p->error_indicator) { + return NULL; + } expr_ty a; void *b; if ( @@ -8926,6 +9760,9 @@ args_rule(Parser *p) p->mark = _mark; } { // kwargs + if (p->error_indicator) { + return NULL; + } asdl_seq* a; if ( (a = kwargs_rule(p)) // kwargs @@ -8949,6 +9786,9 @@ args_rule(Parser *p) p->mark = _mark; } { // named_expression [',' args] + if (p->error_indicator) { + return NULL; + } expr_ty a; void *b; if ( @@ -8992,6 +9832,9 @@ kwargs_rule(Parser *p) asdl_seq* _res = NULL; int _mark = p->mark; { // ','.kwarg_or_starred+ ',' ','.kwarg_or_double_starred+ + if (p->error_indicator) { + return NULL; + } Token * _literal; asdl_seq * a; asdl_seq * b; @@ -9013,6 +9856,9 @@ kwargs_rule(Parser *p) p->mark = _mark; } { // ','.kwarg_or_starred+ + if (p->error_indicator) { + return NULL; + } asdl_seq * _gather_113_var; if ( (_gather_113_var = _gather_113_rule(p)) // ','.kwarg_or_starred+ @@ -9024,6 +9870,9 @@ kwargs_rule(Parser *p) p->mark = _mark; } { // ','.kwarg_or_double_starred+ + if (p->error_indicator) { + return NULL; + } asdl_seq * _gather_115_var; if ( (_gather_115_var = _gather_115_rule(p)) // 
','.kwarg_or_double_starred+ @@ -9057,6 +9906,9 @@ starred_expression_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // '*' expression + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; if ( @@ -9105,6 +9957,9 @@ kwarg_or_starred_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME '=' expression + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; expr_ty b; @@ -9134,6 +9989,9 @@ kwarg_or_starred_rule(Parser *p) p->mark = _mark; } { // starred_expression + if (p->error_indicator) { + return NULL; + } expr_ty a; if ( (a = starred_expression_rule(p)) // starred_expression @@ -9149,6 +10007,9 @@ kwarg_or_starred_rule(Parser *p) p->mark = _mark; } { // invalid_kwarg + if (p->error_indicator) { + return NULL; + } void *invalid_kwarg_var; if ( (invalid_kwarg_var = invalid_kwarg_rule(p)) // invalid_kwarg @@ -9182,6 +10043,9 @@ kwarg_or_double_starred_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME '=' expression + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; expr_ty b; @@ -9211,6 +10075,9 @@ kwarg_or_double_starred_rule(Parser *p) p->mark = _mark; } { // '**' expression + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; if ( @@ -9237,6 +10104,9 @@ kwarg_or_double_starred_rule(Parser *p) p->mark = _mark; } { // invalid_kwarg + if (p->error_indicator) { + return NULL; + } void *invalid_kwarg_var; if ( (invalid_kwarg_var = invalid_kwarg_rule(p)) // invalid_kwarg @@ -9270,6 +10140,9 @@ star_targets_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // star_target !',' + if (p->error_indicator) { + return NULL; + } expr_ty a; if ( (a = star_target_rule(p)) 
// star_target @@ -9287,6 +10160,9 @@ star_targets_rule(Parser *p) p->mark = _mark; } { // star_target ((',' star_target))* ','? + if (p->error_indicator) { + return NULL; + } void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty a; @@ -9331,6 +10207,9 @@ star_targets_seq_rule(Parser *p) asdl_seq* _res = NULL; int _mark = p->mark; { // ','.star_target+ ','? + if (p->error_indicator) { + return NULL; + } void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; @@ -9378,6 +10257,9 @@ star_target_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // '*' (!'*' star_target) + if (p->error_indicator) { + return NULL; + } Token * _literal; void *a; if ( @@ -9404,6 +10286,9 @@ star_target_rule(Parser *p) p->mark = _mark; } { // t_primary '.' NAME !t_lookahead + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; expr_ty b; @@ -9435,6 +10320,9 @@ star_target_rule(Parser *p) p->mark = _mark; } { // t_primary '[' slices ']' !t_lookahead + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; expr_ty a; @@ -9469,6 +10357,9 @@ star_target_rule(Parser *p) p->mark = _mark; } { // star_atom + if (p->error_indicator) { + return NULL; + } expr_ty star_atom_var; if ( (star_atom_var = star_atom_rule(p)) // star_atom @@ -9507,6 +10398,9 @@ star_atom_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME + if (p->error_indicator) { + return NULL; + } expr_ty a; if ( (a = _PyPegen_name_token(p)) // NAME @@ -9522,6 +10416,9 @@ star_atom_rule(Parser *p) p->mark = _mark; } { // '(' star_target ')' + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; expr_ty a; @@ -9543,6 +10440,9 @@ star_atom_rule(Parser *p) p->mark = _mark; } { // '(' star_targets_seq? 
')' + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; void *a; @@ -9572,6 +10472,9 @@ star_atom_rule(Parser *p) p->mark = _mark; } { // '[' star_targets_seq? ']' + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; void *a; @@ -9615,6 +10518,9 @@ single_target_rule(Parser *p) expr_ty _res = NULL; int _mark = p->mark; { // single_subscript_attribute_target + if (p->error_indicator) { + return NULL; + } expr_ty single_subscript_attribute_target_var; if ( (single_subscript_attribute_target_var = single_subscript_attribute_target_rule(p)) // single_subscript_attribute_target @@ -9626,6 +10532,9 @@ single_target_rule(Parser *p) p->mark = _mark; } { // NAME + if (p->error_indicator) { + return NULL; + } expr_ty a; if ( (a = _PyPegen_name_token(p)) // NAME @@ -9641,6 +10550,9 @@ single_target_rule(Parser *p) p->mark = _mark; } { // '(' single_target ')' + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; expr_ty a; @@ -9686,6 +10598,9 @@ single_subscript_attribute_target_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // t_primary '.' NAME !t_lookahead + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; expr_ty b; @@ -9717,6 +10632,9 @@ single_subscript_attribute_target_rule(Parser *p) p->mark = _mark; } { // t_primary '[' slices ']' !t_lookahead + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; expr_ty a; @@ -9765,6 +10683,9 @@ del_targets_rule(Parser *p) asdl_seq* _res = NULL; int _mark = p->mark; { // ','.del_target+ ','? + if (p->error_indicator) { + return NULL; + } void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; @@ -9811,6 +10732,9 @@ del_target_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // t_primary '.' 
NAME &del_target_end + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; expr_ty b; @@ -9842,6 +10766,9 @@ del_target_rule(Parser *p) p->mark = _mark; } { // t_primary '[' slices ']' &del_target_end + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; expr_ty a; @@ -9876,6 +10803,9 @@ del_target_rule(Parser *p) p->mark = _mark; } { // del_t_atom + if (p->error_indicator) { + return NULL; + } expr_ty del_t_atom_var; if ( (del_t_atom_var = del_t_atom_rule(p)) // del_t_atom @@ -9915,6 +10845,9 @@ del_t_atom_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME &del_target_end + if (p->error_indicator) { + return NULL; + } expr_ty a; if ( (a = _PyPegen_name_token(p)) // NAME @@ -9932,6 +10865,9 @@ del_t_atom_rule(Parser *p) p->mark = _mark; } { // '(' del_target ')' + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; expr_ty a; @@ -9953,6 +10889,9 @@ del_t_atom_rule(Parser *p) p->mark = _mark; } { // '(' del_targets? ')' + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; void *a; @@ -9982,6 +10921,9 @@ del_t_atom_rule(Parser *p) p->mark = _mark; } { // '[' del_targets? 
']' + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; void *a; @@ -10011,6 +10953,9 @@ del_t_atom_rule(Parser *p) p->mark = _mark; } { // invalid_del_target + if (p->error_indicator) { + return NULL; + } void *invalid_del_target_var; if ( (invalid_del_target_var = invalid_del_target_rule(p)) // invalid_del_target @@ -10036,6 +10981,9 @@ del_target_end_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // ')' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' @@ -10047,6 +10995,9 @@ del_target_end_rule(Parser *p) p->mark = _mark; } { // ']' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 10)) // token=']' @@ -10058,6 +11009,9 @@ del_target_end_rule(Parser *p) p->mark = _mark; } { // ',' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' @@ -10069,6 +11023,9 @@ del_target_end_rule(Parser *p) p->mark = _mark; } { // ';' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 13)) // token=';' @@ -10080,6 +11037,9 @@ del_target_end_rule(Parser *p) p->mark = _mark; } { // NEWLINE + if (p->error_indicator) { + return NULL; + } Token * newline_var; if ( (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' @@ -10105,6 +11065,9 @@ targets_rule(Parser *p) asdl_seq* _res = NULL; int _mark = p->mark; { // ','.target+ ','? + if (p->error_indicator) { + return NULL; + } void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; @@ -10151,6 +11114,9 @@ target_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // t_primary '.' 
NAME !t_lookahead + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; expr_ty b; @@ -10182,6 +11148,9 @@ target_rule(Parser *p) p->mark = _mark; } { // t_primary '[' slices ']' !t_lookahead + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; expr_ty a; @@ -10216,6 +11185,9 @@ target_rule(Parser *p) p->mark = _mark; } { // t_atom + if (p->error_indicator) { + return NULL; + } expr_ty t_atom_var; if ( (t_atom_var = t_atom_rule(p)) // t_atom @@ -10280,6 +11252,9 @@ t_primary_raw(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // t_primary '.' NAME &t_lookahead + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; expr_ty b; @@ -10311,6 +11286,9 @@ t_primary_raw(Parser *p) p->mark = _mark; } { // t_primary '[' slices ']' &t_lookahead + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; expr_ty a; @@ -10345,6 +11323,9 @@ t_primary_raw(Parser *p) p->mark = _mark; } { // t_primary genexp &t_lookahead + if (p->error_indicator) { + return NULL; + } expr_ty a; expr_ty b; if ( @@ -10373,6 +11354,9 @@ t_primary_raw(Parser *p) p->mark = _mark; } { // t_primary '(' arguments? 
')' &t_lookahead + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; expr_ty a; @@ -10407,6 +11391,9 @@ t_primary_raw(Parser *p) p->mark = _mark; } { // atom &t_lookahead + if (p->error_indicator) { + return NULL; + } expr_ty a; if ( (a = atom_rule(p)) // atom @@ -10438,6 +11425,9 @@ t_lookahead_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // '(' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 7)) // token='(' @@ -10449,6 +11439,9 @@ t_lookahead_rule(Parser *p) p->mark = _mark; } { // '[' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 9)) // token='[' @@ -10460,6 +11453,9 @@ t_lookahead_rule(Parser *p) p->mark = _mark; } { // '.' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 23)) // token='.' @@ -10493,6 +11489,9 @@ t_atom_rule(Parser *p) int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME + if (p->error_indicator) { + return NULL; + } expr_ty a; if ( (a = _PyPegen_name_token(p)) // NAME @@ -10508,6 +11507,9 @@ t_atom_rule(Parser *p) p->mark = _mark; } { // '(' target ')' + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; expr_ty a; @@ -10529,6 +11531,9 @@ t_atom_rule(Parser *p) p->mark = _mark; } { // '(' targets? ')' + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; void *b; @@ -10558,6 +11563,9 @@ t_atom_rule(Parser *p) p->mark = _mark; } { // '[' targets? 
']' + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; void *b; @@ -10604,6 +11612,9 @@ incorrect_arguments_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // args ',' '*' + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; expr_ty args_var; @@ -10625,6 +11636,9 @@ incorrect_arguments_rule(Parser *p) p->mark = _mark; } { // expression for_if_clauses ',' [args | expression for_if_clauses] + if (p->error_indicator) { + return NULL; + } Token * _literal; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings @@ -10650,6 +11664,9 @@ incorrect_arguments_rule(Parser *p) p->mark = _mark; } { // args ',' args + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; expr_ty args_var; @@ -10685,6 +11702,9 @@ invalid_kwarg_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // expression '=' + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; if ( @@ -10717,6 +11737,9 @@ invalid_named_expression_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // expression ':=' expression + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; expr_ty expression_var; @@ -10758,6 +11781,9 @@ invalid_assignment_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // list ':' + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; if ( @@ -10776,6 +11802,9 @@ invalid_assignment_rule(Parser *p) p->mark = _mark; } { // tuple ':' + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty a; if ( @@ -10794,6 +11823,9 @@ invalid_assignment_rule(Parser *p) p->mark = _mark; } { // star_named_expression ',' star_named_expressions* ':' + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; asdl_seq * _loop0_126_var; @@ -10818,6 +11850,9 @@ invalid_assignment_rule(Parser *p) p->mark = _mark; } { // expression ':' expression ['=' annotated_rhs] + if (p->error_indicator) { + 
return NULL; + } Token * _literal; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings @@ -10843,6 +11878,9 @@ invalid_assignment_rule(Parser *p) p->mark = _mark; } { // star_expressions '=' (yield_expr | star_expressions) + if (p->error_indicator) { + return NULL; + } Token * _literal; void *_tmp_128_var; expr_ty a; @@ -10864,6 +11902,9 @@ invalid_assignment_rule(Parser *p) p->mark = _mark; } { // star_expressions augassign (yield_expr | star_expressions) + if (p->error_indicator) { + return NULL; + } void *_tmp_129_var; expr_ty a; AugOperator* augassign_var; @@ -10899,6 +11940,9 @@ invalid_block_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // NEWLINE !INDENT + if (p->error_indicator) { + return NULL; + } Token * newline_var; if ( (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' @@ -10930,6 +11974,9 @@ invalid_comprehension_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // ('[' | '(' | '{') starred_expression for_if_clauses + if (p->error_indicator) { + return NULL; + } void *_tmp_130_var; expr_ty a; asdl_seq* for_if_clauses_var; @@ -10966,6 +12013,9 @@ invalid_parameters_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // param_no_default* (slash_with_default | param_with_default+) param_no_default + if (p->error_indicator) { + return NULL; + } asdl_seq * _loop0_131_var; void *_tmp_132_var; arg_ty param_no_default_var; @@ -11001,6 +12051,9 @@ invalid_star_etc_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // '*' (')' | ',' (')' | '**')) + if (p->error_indicator) { + return NULL; + } Token * _literal; void *_tmp_133_var; if ( @@ -11033,6 +12086,9 @@ invalid_lambda_star_etc_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // '*' (':' | ',' (':' | '**')) + if (p->error_indicator) { + return NULL; + } Token * _literal; void *_tmp_134_var; if ( @@ -11065,6 +12121,9 @@ invalid_double_type_comments_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // TYPE_COMMENT NEWLINE 
TYPE_COMMENT NEWLINE INDENT + if (p->error_indicator) { + return NULL; + } Token * indent_var; Token * newline_var; Token * newline_var_1; @@ -11106,6 +12165,9 @@ invalid_del_target_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // star_expression &del_target_end + if (p->error_indicator) { + return NULL; + } expr_ty a; if ( (a = star_expression_rule(p)) // star_expression @@ -11146,6 +12208,9 @@ _loop0_1_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // NEWLINE + if (p->error_indicator) { + return NULL; + } Token * newline_var; while ( (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' @@ -11199,6 +12264,9 @@ _loop0_2_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // NEWLINE + if (p->error_indicator) { + return NULL; + } Token * newline_var; while ( (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' @@ -11252,6 +12320,9 @@ _loop0_4_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' expression + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty elem; while ( @@ -11304,6 +12375,9 @@ _gather_3_rule(Parser *p) asdl_seq * _res = NULL; int _mark = p->mark; { // expression _loop0_4 + if (p->error_indicator) { + return NULL; + } expr_ty elem; asdl_seq * seq; if ( @@ -11341,6 +12415,9 @@ _loop0_6_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' expression + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty elem; while ( @@ -11393,6 +12470,9 @@ _gather_5_rule(Parser *p) asdl_seq * _res = NULL; int _mark = p->mark; { // expression _loop0_6 + if (p->error_indicator) { + return NULL; + } expr_ty elem; asdl_seq * seq; if ( @@ -11430,6 +12510,9 @@ _loop0_8_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' expression + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty elem; while ( @@ -11482,6 +12565,9 @@ _gather_7_rule(Parser *p) asdl_seq * _res = NULL; int _mark = 
p->mark; { // expression _loop0_8 + if (p->error_indicator) { + return NULL; + } expr_ty elem; asdl_seq * seq; if ( @@ -11519,6 +12605,9 @@ _loop0_10_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' expression + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty elem; while ( @@ -11571,6 +12660,9 @@ _gather_9_rule(Parser *p) asdl_seq * _res = NULL; int _mark = p->mark; { // expression _loop0_10 + if (p->error_indicator) { + return NULL; + } expr_ty elem; asdl_seq * seq; if ( @@ -11608,6 +12700,9 @@ _loop1_11_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // statement + if (p->error_indicator) { + return NULL; + } asdl_seq* statement_var; while ( (statement_var = statement_rule(p)) // statement @@ -11665,6 +12760,9 @@ _loop0_13_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ';' small_stmt + if (p->error_indicator) { + return NULL; + } Token * _literal; stmt_ty elem; while ( @@ -11717,6 +12815,9 @@ _gather_12_rule(Parser *p) asdl_seq * _res = NULL; int _mark = p->mark; { // small_stmt _loop0_13 + if (p->error_indicator) { + return NULL; + } stmt_ty elem; asdl_seq * seq; if ( @@ -11745,6 +12846,9 @@ _tmp_14_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // 'import' + if (p->error_indicator) { + return NULL; + } Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 513)) // token='import' @@ -11756,6 +12860,9 @@ _tmp_14_rule(Parser *p) p->mark = _mark; } { // 'from' + if (p->error_indicator) { + return NULL; + } Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 514)) // token='from' @@ -11781,6 +12888,9 @@ _tmp_15_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // 'def' + if (p->error_indicator) { + return NULL; + } Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 522)) // token='def' @@ -11792,6 +12902,9 @@ _tmp_15_rule(Parser *p) p->mark = _mark; } { // '@' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( 
(_literal = _PyPegen_expect_token(p, 49)) // token='@' @@ -11803,6 +12916,9 @@ _tmp_15_rule(Parser *p) p->mark = _mark; } { // ASYNC + if (p->error_indicator) { + return NULL; + } Token * async_var; if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' @@ -11828,6 +12944,9 @@ _tmp_16_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // 'class' + if (p->error_indicator) { + return NULL; + } Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 523)) // token='class' @@ -11839,6 +12958,9 @@ _tmp_16_rule(Parser *p) p->mark = _mark; } { // '@' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 49)) // token='@' @@ -11864,6 +12986,9 @@ _tmp_17_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // 'with' + if (p->error_indicator) { + return NULL; + } Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 519)) // token='with' @@ -11875,6 +13000,9 @@ _tmp_17_rule(Parser *p) p->mark = _mark; } { // ASYNC + if (p->error_indicator) { + return NULL; + } Token * async_var; if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' @@ -11900,6 +13028,9 @@ _tmp_18_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // 'for' + if (p->error_indicator) { + return NULL; + } Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 517)) // token='for' @@ -11911,6 +13042,9 @@ _tmp_18_rule(Parser *p) p->mark = _mark; } { // ASYNC + if (p->error_indicator) { + return NULL; + } Token * async_var; if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' @@ -11936,6 +13070,9 @@ _tmp_19_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // '=' annotated_rhs + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty d; if ( @@ -11968,6 +13105,9 @@ _tmp_20_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // '(' single_target ')' + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; expr_ty b; @@ -11989,6 
+13129,9 @@ _tmp_20_rule(Parser *p) p->mark = _mark; } { // single_subscript_attribute_target + if (p->error_indicator) { + return NULL; + } expr_ty single_subscript_attribute_target_var; if ( (single_subscript_attribute_target_var = single_subscript_attribute_target_rule(p)) // single_subscript_attribute_target @@ -12014,6 +13157,9 @@ _tmp_21_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // '=' annotated_rhs + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty d; if ( @@ -12055,6 +13201,9 @@ _loop1_22_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // (star_targets '=') + if (p->error_indicator) { + return NULL; + } void *_tmp_135_var; while ( (_tmp_135_var = _tmp_135_rule(p)) // star_targets '=' @@ -12103,6 +13252,9 @@ _tmp_23_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // yield_expr + if (p->error_indicator) { + return NULL; + } expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr @@ -12114,6 +13266,9 @@ _tmp_23_rule(Parser *p) p->mark = _mark; } { // star_expressions + if (p->error_indicator) { + return NULL; + } expr_ty star_expressions_var; if ( (star_expressions_var = star_expressions_rule(p)) // star_expressions @@ -12139,6 +13294,9 @@ _tmp_24_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // yield_expr + if (p->error_indicator) { + return NULL; + } expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr @@ -12150,6 +13308,9 @@ _tmp_24_rule(Parser *p) p->mark = _mark; } { // star_expressions + if (p->error_indicator) { + return NULL; + } expr_ty star_expressions_var; if ( (star_expressions_var = star_expressions_rule(p)) // star_expressions @@ -12184,6 +13345,9 @@ _loop0_26_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' NAME + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty elem; while ( @@ -12236,6 +13400,9 @@ _gather_25_rule(Parser *p) asdl_seq * _res = NULL; int _mark = 
p->mark; { // NAME _loop0_26 + if (p->error_indicator) { + return NULL; + } expr_ty elem; asdl_seq * seq; if ( @@ -12273,6 +13440,9 @@ _loop0_28_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' NAME + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty elem; while ( @@ -12325,6 +13495,9 @@ _gather_27_rule(Parser *p) asdl_seq * _res = NULL; int _mark = p->mark; { // NAME _loop0_28 + if (p->error_indicator) { + return NULL; + } expr_ty elem; asdl_seq * seq; if ( @@ -12353,6 +13526,9 @@ _tmp_29_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // ',' expression + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty z; if ( @@ -12394,6 +13570,9 @@ _loop0_30_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ('.' | '...') + if (p->error_indicator) { + return NULL; + } void *_tmp_136_var; while ( (_tmp_136_var = _tmp_136_rule(p)) // '.' | '...' @@ -12447,6 +13626,9 @@ _loop1_31_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ('.' | '...') + if (p->error_indicator) { + return NULL; + } void *_tmp_137_var; while ( (_tmp_137_var = _tmp_137_rule(p)) // '.' | '...' 
@@ -12504,6 +13686,9 @@ _loop0_33_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' import_from_as_name + if (p->error_indicator) { + return NULL; + } Token * _literal; alias_ty elem; while ( @@ -12556,6 +13741,9 @@ _gather_32_rule(Parser *p) asdl_seq * _res = NULL; int _mark = p->mark; { // import_from_as_name _loop0_33 + if (p->error_indicator) { + return NULL; + } alias_ty elem; asdl_seq * seq; if ( @@ -12584,6 +13772,9 @@ _tmp_34_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // 'as' NAME + if (p->error_indicator) { + return NULL; + } Token * _keyword; expr_ty z; if ( @@ -12625,6 +13816,9 @@ _loop0_36_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' dotted_as_name + if (p->error_indicator) { + return NULL; + } Token * _literal; alias_ty elem; while ( @@ -12677,6 +13871,9 @@ _gather_35_rule(Parser *p) asdl_seq * _res = NULL; int _mark = p->mark; { // dotted_as_name _loop0_36 + if (p->error_indicator) { + return NULL; + } alias_ty elem; asdl_seq * seq; if ( @@ -12705,6 +13902,9 @@ _tmp_37_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // 'as' NAME + if (p->error_indicator) { + return NULL; + } Token * _keyword; expr_ty z; if ( @@ -12746,6 +13946,9 @@ _loop0_39_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' with_item + if (p->error_indicator) { + return NULL; + } Token * _literal; withitem_ty elem; while ( @@ -12798,6 +14001,9 @@ _gather_38_rule(Parser *p) asdl_seq * _res = NULL; int _mark = p->mark; { // with_item _loop0_39 + if (p->error_indicator) { + return NULL; + } withitem_ty elem; asdl_seq * seq; if ( @@ -12835,6 +14041,9 @@ _loop0_41_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' with_item + if (p->error_indicator) { + return NULL; + } Token * _literal; withitem_ty elem; while ( @@ -12887,6 +14096,9 @@ _gather_40_rule(Parser *p) asdl_seq * _res = NULL; int _mark = p->mark; { // with_item _loop0_41 + if (p->error_indicator) { + return 
NULL; + } withitem_ty elem; asdl_seq * seq; if ( @@ -12924,6 +14136,9 @@ _loop0_43_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' with_item + if (p->error_indicator) { + return NULL; + } Token * _literal; withitem_ty elem; while ( @@ -12976,6 +14191,9 @@ _gather_42_rule(Parser *p) asdl_seq * _res = NULL; int _mark = p->mark; { // with_item _loop0_43 + if (p->error_indicator) { + return NULL; + } withitem_ty elem; asdl_seq * seq; if ( @@ -13013,6 +14231,9 @@ _loop0_45_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' with_item + if (p->error_indicator) { + return NULL; + } Token * _literal; withitem_ty elem; while ( @@ -13065,6 +14286,9 @@ _gather_44_rule(Parser *p) asdl_seq * _res = NULL; int _mark = p->mark; { // with_item _loop0_45 + if (p->error_indicator) { + return NULL; + } withitem_ty elem; asdl_seq * seq; if ( @@ -13093,6 +14317,9 @@ _tmp_46_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // 'as' target + if (p->error_indicator) { + return NULL; + } Token * _keyword; expr_ty t; if ( @@ -13134,6 +14361,9 @@ _loop1_47_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // except_block + if (p->error_indicator) { + return NULL; + } excepthandler_ty except_block_var; while ( (except_block_var = except_block_rule(p)) // except_block @@ -13182,6 +14412,9 @@ _tmp_48_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // 'as' NAME + if (p->error_indicator) { + return NULL; + } Token * _keyword; expr_ty z; if ( @@ -13214,6 +14447,9 @@ _tmp_49_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // 'from' expression + if (p->error_indicator) { + return NULL; + } Token * _keyword; expr_ty z; if ( @@ -13246,6 +14482,9 @@ _tmp_50_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // '->' expression + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty z; if ( @@ -13278,6 +14517,9 @@ _tmp_51_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // '->' 
expression + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty z; if ( @@ -13310,6 +14552,9 @@ _tmp_52_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // NEWLINE INDENT + if (p->error_indicator) { + return NULL; + } Token * indent_var; Token * newline_var; if ( @@ -13347,6 +14592,9 @@ _loop0_53_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_no_default + if (p->error_indicator) { + return NULL; + } arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -13400,6 +14648,9 @@ _loop0_54_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_with_default + if (p->error_indicator) { + return NULL; + } NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -13453,6 +14704,9 @@ _loop0_55_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_with_default + if (p->error_indicator) { + return NULL; + } NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -13506,6 +14760,9 @@ _loop1_56_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_no_default + if (p->error_indicator) { + return NULL; + } arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -13563,6 +14820,9 @@ _loop0_57_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_with_default + if (p->error_indicator) { + return NULL; + } NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -13616,6 +14876,9 @@ _loop1_58_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_with_default + if (p->error_indicator) { + return NULL; + } NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // 
param_with_default @@ -13673,6 +14936,9 @@ _loop1_59_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_no_default + if (p->error_indicator) { + return NULL; + } arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -13730,6 +14996,9 @@ _loop1_60_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_no_default + if (p->error_indicator) { + return NULL; + } arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -13787,6 +15056,9 @@ _loop0_61_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_no_default + if (p->error_indicator) { + return NULL; + } arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -13840,6 +15112,9 @@ _loop1_62_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_with_default + if (p->error_indicator) { + return NULL; + } NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -13897,6 +15172,9 @@ _loop0_63_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_no_default + if (p->error_indicator) { + return NULL; + } arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -13950,6 +15228,9 @@ _loop1_64_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_with_default + if (p->error_indicator) { + return NULL; + } NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -14007,6 +15288,9 @@ _loop0_65_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_maybe_default + if (p->error_indicator) { + return NULL; + } NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) // 
param_maybe_default @@ -14060,6 +15344,9 @@ _loop1_66_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_maybe_default + if (p->error_indicator) { + return NULL; + } NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default @@ -14117,6 +15404,9 @@ _loop1_67_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ('@' named_expression NEWLINE) + if (p->error_indicator) { + return NULL; + } void *_tmp_138_var; while ( (_tmp_138_var = _tmp_138_rule(p)) // '@' named_expression NEWLINE @@ -14165,6 +15455,9 @@ _tmp_68_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // '(' arguments? ')' + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; void *z; @@ -14209,6 +15502,9 @@ _loop0_70_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' star_expression + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty elem; while ( @@ -14261,6 +15557,9 @@ _gather_69_rule(Parser *p) asdl_seq * _res = NULL; int _mark = p->mark; { // star_expression _loop0_70 + if (p->error_indicator) { + return NULL; + } expr_ty elem; asdl_seq * seq; if ( @@ -14298,6 +15597,9 @@ _loop1_71_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // (',' star_expression) + if (p->error_indicator) { + return NULL; + } void *_tmp_139_var; while ( (_tmp_139_var = _tmp_139_rule(p)) // ',' star_expression @@ -14355,6 +15657,9 @@ _loop0_73_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' star_named_expression + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty elem; while ( @@ -14407,6 +15712,9 @@ _gather_72_rule(Parser *p) asdl_seq * _res = NULL; int _mark = p->mark; { // star_named_expression _loop0_73 + if (p->error_indicator) { + return NULL; + } expr_ty elem; asdl_seq * seq; if ( @@ -14444,6 +15752,9 @@ _loop1_74_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t 
_n = 0; { // (',' expression) + if (p->error_indicator) { + return NULL; + } void *_tmp_140_var; while ( (_tmp_140_var = _tmp_140_rule(p)) // ',' expression @@ -14501,6 +15812,9 @@ _loop0_75_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // lambda_param_no_default + if (p->error_indicator) { + return NULL; + } arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default @@ -14554,6 +15868,9 @@ _loop0_76_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // lambda_param_with_default + if (p->error_indicator) { + return NULL; + } NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default @@ -14607,6 +15924,9 @@ _loop0_77_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // lambda_param_with_default + if (p->error_indicator) { + return NULL; + } NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default @@ -14660,6 +15980,9 @@ _loop1_78_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // lambda_param_no_default + if (p->error_indicator) { + return NULL; + } arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default @@ -14717,6 +16040,9 @@ _loop0_79_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // lambda_param_with_default + if (p->error_indicator) { + return NULL; + } NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default @@ -14770,6 +16096,9 @@ _loop1_80_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // lambda_param_with_default + if (p->error_indicator) { + return NULL; + } NameDefaultPair* lambda_param_with_default_var; while ( 
(lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default @@ -14827,6 +16156,9 @@ _loop1_81_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // lambda_param_no_default + if (p->error_indicator) { + return NULL; + } arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default @@ -14884,6 +16216,9 @@ _loop1_82_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // lambda_param_no_default + if (p->error_indicator) { + return NULL; + } arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default @@ -14941,6 +16276,9 @@ _loop0_83_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // lambda_param_no_default + if (p->error_indicator) { + return NULL; + } arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default @@ -14994,6 +16332,9 @@ _loop1_84_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // lambda_param_with_default + if (p->error_indicator) { + return NULL; + } NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default @@ -15051,6 +16392,9 @@ _loop0_85_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // lambda_param_no_default + if (p->error_indicator) { + return NULL; + } arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default @@ -15104,6 +16448,9 @@ _loop1_86_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // lambda_param_with_default + if (p->error_indicator) { + return NULL; + } NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default @@ 
-15161,6 +16508,9 @@ _loop0_87_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // lambda_param_maybe_default + if (p->error_indicator) { + return NULL; + } NameDefaultPair* lambda_param_maybe_default_var; while ( (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default @@ -15214,6 +16564,9 @@ _loop1_88_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // lambda_param_maybe_default + if (p->error_indicator) { + return NULL; + } NameDefaultPair* lambda_param_maybe_default_var; while ( (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default @@ -15271,6 +16624,9 @@ _loop1_89_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ('or' conjunction) + if (p->error_indicator) { + return NULL; + } void *_tmp_141_var; while ( (_tmp_141_var = _tmp_141_rule(p)) // 'or' conjunction @@ -15328,6 +16684,9 @@ _loop1_90_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ('and' inversion) + if (p->error_indicator) { + return NULL; + } void *_tmp_142_var; while ( (_tmp_142_var = _tmp_142_rule(p)) // 'and' inversion @@ -15385,6 +16744,9 @@ _loop1_91_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // compare_op_bitwise_or_pair + if (p->error_indicator) { + return NULL; + } CmpopExprPair* compare_op_bitwise_or_pair_var; while ( (compare_op_bitwise_or_pair_var = compare_op_bitwise_or_pair_rule(p)) // compare_op_bitwise_or_pair @@ -15433,6 +16795,9 @@ _tmp_92_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // '!=' + if (p->error_indicator) { + return NULL; + } Token * tok; if ( (tok = _PyPegen_expect_token(p, 28)) // token='!=' @@ -15471,6 +16836,9 @@ _loop0_94_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' slice + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty elem; while ( @@ -15523,6 +16891,9 @@ _gather_93_rule(Parser *p) asdl_seq * _res = NULL; int _mark = 
p->mark; { // slice _loop0_94 + if (p->error_indicator) { + return NULL; + } expr_ty elem; asdl_seq * seq; if ( @@ -15551,6 +16922,9 @@ _tmp_95_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // ':' expression? + if (p->error_indicator) { + return NULL; + } Token * _literal; void *d; if ( @@ -15583,6 +16957,9 @@ _tmp_96_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // tuple + if (p->error_indicator) { + return NULL; + } expr_ty tuple_var; if ( (tuple_var = tuple_rule(p)) // tuple @@ -15594,6 +16971,9 @@ _tmp_96_rule(Parser *p) p->mark = _mark; } { // group + if (p->error_indicator) { + return NULL; + } expr_ty group_var; if ( (group_var = group_rule(p)) // group @@ -15605,6 +16985,9 @@ _tmp_96_rule(Parser *p) p->mark = _mark; } { // genexp + if (p->error_indicator) { + return NULL; + } expr_ty genexp_var; if ( (genexp_var = genexp_rule(p)) // genexp @@ -15630,6 +17013,9 @@ _tmp_97_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // list + if (p->error_indicator) { + return NULL; + } expr_ty list_var; if ( (list_var = list_rule(p)) // list @@ -15641,6 +17027,9 @@ _tmp_97_rule(Parser *p) p->mark = _mark; } { // listcomp + if (p->error_indicator) { + return NULL; + } expr_ty listcomp_var; if ( (listcomp_var = listcomp_rule(p)) // listcomp @@ -15666,6 +17055,9 @@ _tmp_98_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // dict + if (p->error_indicator) { + return NULL; + } expr_ty dict_var; if ( (dict_var = dict_rule(p)) // dict @@ -15677,6 +17069,9 @@ _tmp_98_rule(Parser *p) p->mark = _mark; } { // set + if (p->error_indicator) { + return NULL; + } expr_ty set_var; if ( (set_var = set_rule(p)) // set @@ -15688,6 +17083,9 @@ _tmp_98_rule(Parser *p) p->mark = _mark; } { // dictcomp + if (p->error_indicator) { + return NULL; + } expr_ty dictcomp_var; if ( (dictcomp_var = dictcomp_rule(p)) // dictcomp @@ -15699,6 +17097,9 @@ _tmp_98_rule(Parser *p) p->mark = _mark; } { // setcomp + if (p->error_indicator) { + return NULL; + } 
expr_ty setcomp_var; if ( (setcomp_var = setcomp_rule(p)) // setcomp @@ -15733,6 +17134,9 @@ _loop1_99_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // STRING + if (p->error_indicator) { + return NULL; + } expr_ty string_var; while ( (string_var = _PyPegen_string_token(p)) // STRING @@ -15781,6 +17185,9 @@ _tmp_100_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // star_named_expression ',' star_named_expressions? + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty y; void *z; @@ -15816,6 +17223,9 @@ _tmp_101_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // yield_expr + if (p->error_indicator) { + return NULL; + } expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr @@ -15827,6 +17237,9 @@ _tmp_101_rule(Parser *p) p->mark = _mark; } { // named_expression + if (p->error_indicator) { + return NULL; + } expr_ty named_expression_var; if ( (named_expression_var = named_expression_rule(p)) // named_expression @@ -15861,6 +17274,9 @@ _loop0_103_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' kvpair + if (p->error_indicator) { + return NULL; + } Token * _literal; KeyValuePair* elem; while ( @@ -15913,6 +17329,9 @@ _gather_102_rule(Parser *p) asdl_seq * _res = NULL; int _mark = p->mark; { // kvpair _loop0_103 + if (p->error_indicator) { + return NULL; + } KeyValuePair* elem; asdl_seq * seq; if ( @@ -15950,6 +17369,9 @@ _loop1_104_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // for_if_clause + if (p->error_indicator) { + return NULL; + } comprehension_ty for_if_clause_var; while ( (for_if_clause_var = for_if_clause_rule(p)) // for_if_clause @@ -16007,6 +17429,9 @@ _loop0_105_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ('if' disjunction) + if (p->error_indicator) { + return NULL; + } void *_tmp_143_var; while ( (_tmp_143_var = _tmp_143_rule(p)) // 'if' disjunction @@ -16060,6 +17485,9 @@ _loop0_106_rule(Parser *p) 
ssize_t _children_capacity = 1; ssize_t _n = 0; { // ('if' disjunction) + if (p->error_indicator) { + return NULL; + } void *_tmp_144_var; while ( (_tmp_144_var = _tmp_144_rule(p)) // 'if' disjunction @@ -16104,6 +17532,9 @@ _tmp_107_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // ',' args + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty c; if ( @@ -16136,6 +17567,9 @@ _tmp_108_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // ',' args + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty c; if ( @@ -16177,6 +17611,9 @@ _loop0_110_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' kwarg_or_starred + if (p->error_indicator) { + return NULL; + } Token * _literal; KeywordOrStarred* elem; while ( @@ -16229,6 +17666,9 @@ _gather_109_rule(Parser *p) asdl_seq * _res = NULL; int _mark = p->mark; { // kwarg_or_starred _loop0_110 + if (p->error_indicator) { + return NULL; + } KeywordOrStarred* elem; asdl_seq * seq; if ( @@ -16266,6 +17706,9 @@ _loop0_112_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' kwarg_or_double_starred + if (p->error_indicator) { + return NULL; + } Token * _literal; KeywordOrStarred* elem; while ( @@ -16318,6 +17761,9 @@ _gather_111_rule(Parser *p) asdl_seq * _res = NULL; int _mark = p->mark; { // kwarg_or_double_starred _loop0_112 + if (p->error_indicator) { + return NULL; + } KeywordOrStarred* elem; asdl_seq * seq; if ( @@ -16355,6 +17801,9 @@ _loop0_114_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' kwarg_or_starred + if (p->error_indicator) { + return NULL; + } Token * _literal; KeywordOrStarred* elem; while ( @@ -16407,6 +17856,9 @@ _gather_113_rule(Parser *p) asdl_seq * _res = NULL; int _mark = p->mark; { // kwarg_or_starred _loop0_114 + if (p->error_indicator) { + return NULL; + } KeywordOrStarred* elem; asdl_seq * seq; if ( @@ -16444,6 +17896,9 @@ _loop0_116_rule(Parser *p) ssize_t _children_capacity = 1; 
ssize_t _n = 0; { // ',' kwarg_or_double_starred + if (p->error_indicator) { + return NULL; + } Token * _literal; KeywordOrStarred* elem; while ( @@ -16496,6 +17951,9 @@ _gather_115_rule(Parser *p) asdl_seq * _res = NULL; int _mark = p->mark; { // kwarg_or_double_starred _loop0_116 + if (p->error_indicator) { + return NULL; + } KeywordOrStarred* elem; asdl_seq * seq; if ( @@ -16533,6 +17991,9 @@ _loop0_117_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // (',' star_target) + if (p->error_indicator) { + return NULL; + } void *_tmp_145_var; while ( (_tmp_145_var = _tmp_145_rule(p)) // ',' star_target @@ -16586,6 +18047,9 @@ _loop0_119_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' star_target + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty elem; while ( @@ -16638,6 +18102,9 @@ _gather_118_rule(Parser *p) asdl_seq * _res = NULL; int _mark = p->mark; { // star_target _loop0_119 + if (p->error_indicator) { + return NULL; + } expr_ty elem; asdl_seq * seq; if ( @@ -16666,6 +18133,9 @@ _tmp_120_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // !'*' star_target + if (p->error_indicator) { + return NULL; + } expr_ty star_target_var; if ( _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 16) // token='*' @@ -16702,6 +18172,9 @@ _loop0_122_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' del_target + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty elem; while ( @@ -16754,6 +18227,9 @@ _gather_121_rule(Parser *p) asdl_seq * _res = NULL; int _mark = p->mark; { // del_target _loop0_122 + if (p->error_indicator) { + return NULL; + } expr_ty elem; asdl_seq * seq; if ( @@ -16791,6 +18267,9 @@ _loop0_124_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' target + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty elem; while ( @@ -16843,6 +18322,9 @@ _gather_123_rule(Parser *p) asdl_seq * _res = NULL; int 
_mark = p->mark; { // target _loop0_124 + if (p->error_indicator) { + return NULL; + } expr_ty elem; asdl_seq * seq; if ( @@ -16871,6 +18353,9 @@ _tmp_125_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // args + if (p->error_indicator) { + return NULL; + } expr_ty args_var; if ( (args_var = args_rule(p)) // args @@ -16882,6 +18367,9 @@ _tmp_125_rule(Parser *p) p->mark = _mark; } { // expression for_if_clauses + if (p->error_indicator) { + return NULL; + } expr_ty expression_var; asdl_seq* for_if_clauses_var; if ( @@ -16919,6 +18407,9 @@ _loop0_126_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // star_named_expressions + if (p->error_indicator) { + return NULL; + } asdl_seq* star_named_expressions_var; while ( (star_named_expressions_var = star_named_expressions_rule(p)) // star_named_expressions @@ -16963,6 +18454,9 @@ _tmp_127_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // '=' annotated_rhs + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty annotated_rhs_var; if ( @@ -16991,6 +18485,9 @@ _tmp_128_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // yield_expr + if (p->error_indicator) { + return NULL; + } expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr @@ -17002,6 +18499,9 @@ _tmp_128_rule(Parser *p) p->mark = _mark; } { // star_expressions + if (p->error_indicator) { + return NULL; + } expr_ty star_expressions_var; if ( (star_expressions_var = star_expressions_rule(p)) // star_expressions @@ -17027,6 +18527,9 @@ _tmp_129_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // yield_expr + if (p->error_indicator) { + return NULL; + } expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr @@ -17038,6 +18541,9 @@ _tmp_129_rule(Parser *p) p->mark = _mark; } { // star_expressions + if (p->error_indicator) { + return NULL; + } expr_ty star_expressions_var; if ( (star_expressions_var = star_expressions_rule(p)) // 
star_expressions @@ -17063,6 +18569,9 @@ _tmp_130_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // '[' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 9)) // token='[' @@ -17074,6 +18583,9 @@ _tmp_130_rule(Parser *p) p->mark = _mark; } { // '(' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 7)) // token='(' @@ -17085,6 +18597,9 @@ _tmp_130_rule(Parser *p) p->mark = _mark; } { // '{' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 25)) // token='{' @@ -17119,6 +18634,9 @@ _loop0_131_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_no_default + if (p->error_indicator) { + return NULL; + } arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -17163,6 +18681,9 @@ _tmp_132_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // slash_with_default + if (p->error_indicator) { + return NULL; + } SlashWithDefault* slash_with_default_var; if ( (slash_with_default_var = slash_with_default_rule(p)) // slash_with_default @@ -17174,6 +18695,9 @@ _tmp_132_rule(Parser *p) p->mark = _mark; } { // param_with_default+ + if (p->error_indicator) { + return NULL; + } asdl_seq * _loop1_146_var; if ( (_loop1_146_var = _loop1_146_rule(p)) // param_with_default+ @@ -17199,6 +18723,9 @@ _tmp_133_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // ')' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' @@ -17210,6 +18737,9 @@ _tmp_133_rule(Parser *p) p->mark = _mark; } { // ',' (')' | '**') + if (p->error_indicator) { + return NULL; + } Token * _literal; void *_tmp_147_var; if ( @@ -17238,6 +18768,9 @@ _tmp_134_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // ':' + if (p->error_indicator) { + return NULL; + } Token * 
_literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' @@ -17249,6 +18782,9 @@ _tmp_134_rule(Parser *p) p->mark = _mark; } { // ',' (':' | '**') + if (p->error_indicator) { + return NULL; + } Token * _literal; void *_tmp_148_var; if ( @@ -17277,6 +18813,9 @@ _tmp_135_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // star_targets '=' + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty z; if ( @@ -17309,6 +18848,9 @@ _tmp_136_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // '.' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 23)) // token='.' @@ -17320,6 +18862,9 @@ _tmp_136_rule(Parser *p) p->mark = _mark; } { // '...' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 52)) // token='...' @@ -17345,6 +18890,9 @@ _tmp_137_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // '.' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 23)) // token='.' @@ -17356,6 +18904,9 @@ _tmp_137_rule(Parser *p) p->mark = _mark; } { // '...' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 52)) // token='...' 
@@ -17381,6 +18932,9 @@ _tmp_138_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // '@' named_expression NEWLINE + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty f; Token * newline_var; @@ -17416,6 +18970,9 @@ _tmp_139_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // ',' star_expression + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty c; if ( @@ -17448,6 +19005,9 @@ _tmp_140_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // ',' expression + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty c; if ( @@ -17480,6 +19040,9 @@ _tmp_141_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // 'or' conjunction + if (p->error_indicator) { + return NULL; + } Token * _keyword; expr_ty c; if ( @@ -17512,6 +19075,9 @@ _tmp_142_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // 'and' inversion + if (p->error_indicator) { + return NULL; + } Token * _keyword; expr_ty c; if ( @@ -17544,6 +19110,9 @@ _tmp_143_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // 'if' disjunction + if (p->error_indicator) { + return NULL; + } Token * _keyword; expr_ty z; if ( @@ -17576,6 +19145,9 @@ _tmp_144_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // 'if' disjunction + if (p->error_indicator) { + return NULL; + } Token * _keyword; expr_ty z; if ( @@ -17608,6 +19180,9 @@ _tmp_145_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // ',' star_target + if (p->error_indicator) { + return NULL; + } Token * _literal; expr_ty c; if ( @@ -17649,6 +19224,9 @@ _loop1_146_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_with_default + if (p->error_indicator) { + return NULL; + } NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -17697,6 +19275,9 @@ _tmp_147_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // ')' + if (p->error_indicator) { + 
return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' @@ -17708,6 +19289,9 @@ _tmp_147_rule(Parser *p) p->mark = _mark; } { // '**' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 35)) // token='**' @@ -17733,6 +19317,9 @@ _tmp_148_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // ':' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' @@ -17744,6 +19331,9 @@ _tmp_148_rule(Parser *p) p->mark = _mark; } { // '**' + if (p->error_indicator) { + return NULL; + } Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 35)) // token='**' diff --git a/Tools/peg_generator/pegen/c_generator.py b/Tools/peg_generator/pegen/c_generator.py index 8f9972bb41a52..c93b348e2b44c 100644 --- a/Tools/peg_generator/pegen/c_generator.py +++ b/Tools/peg_generator/pegen/c_generator.py @@ -433,6 +433,12 @@ def _set_up_token_end_metadata_extraction(self) -> None: self.print("int _end_col_offset = _token->end_col_offset;") self.print("UNUSED(_end_col_offset); // Only used by EXTRA macro") + def _check_for_errors(self) -> None: + self.print("if (p->error_indicator) {") + with self.indent(): + self.print("return NULL;") + self.print("}") + def _set_up_rule_memoization(self, node: Rule, result_type: str) -> None: self.print("{") with self.indent(): @@ -468,10 +474,7 @@ def _handle_default_rule_body(self, node: Rule, rhs: Rhs, result_type: str) -> N memoize = self._should_memoize(node) with self.indent(): - self.print("if (p->error_indicator) {") - with self.indent(): - self.print("return NULL;") - self.print("}") + self._check_for_errors() self.print(f"{result_type} _res = NULL;") if memoize: self.print(f"if (_PyPegen_is_memoized(p, {node.name}_type, &_res))") @@ -500,10 +503,7 @@ def _handle_loop_rule_body(self, node: Rule, rhs: Rhs) -> None: is_repeat1 = node.name.startswith("_loop1") with self.indent(): - 
self.print("if (p->error_indicator) {") - with self.indent(): - self.print("return NULL;") - self.print("}") + self._check_for_errors() self.print("void *_res = NULL;") if memoize: self.print(f"if (_PyPegen_is_memoized(p, {node.name}_type, &_res))") @@ -687,6 +687,7 @@ def visit_Alt( ) -> None: self.print(f"{{ // {node}") with self.indent(): + self._check_for_errors() # Prepare variable declarations for the alternative vars = self.collect_vars(node) for v, var_type in sorted(item for item in vars.items() if item[0] is not None): From webhook-mailer at python.org Mon May 18 13:42:17 2020 From: webhook-mailer at python.org (Batuhan Taskaya) Date: Mon, 18 May 2020 17:42:17 -0000 Subject: [Python-checkins] bpo-40528: Improve AST generation script to do builds simultaneously (GH-19968) Message-ID: https://github.com/python/cpython/commit/63b8e0cba3d43e53a8dd8878ee1443c8427f462d commit: 63b8e0cba3d43e53a8dd8878ee1443c8427f462d branch: master author: Batuhan Taskaya committer: GitHub date: 2020-05-18T18:42:10+01:00 summary: bpo-40528: Improve AST generation script to do builds simultaneously (GH-19968) - Switch from getopt to argparse. - Removed the limitation of not being able to produce both C and H simultaneously. This will make it run faster since it parses the asdl definition once and uses the generated tree to generate both the header and the C source. 
files: M Makefile.pre.in M PCbuild/regen.vcxproj M Parser/asdl_c.py diff --git a/Makefile.pre.in b/Makefile.pre.in index dbfd805f1a02f..de50f6b7f7022 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -845,17 +845,15 @@ regen-pegen: .PHONY=regen-ast regen-ast: - # Regenerate Include/Python-ast.h using Parser/asdl_c.py -h + # Regenerate Include/Python-ast.h and Python/Python-ast.c using Parser/asdl_c.py $(MKDIR_P) $(srcdir)/Include - $(PYTHON_FOR_REGEN) $(srcdir)/Parser/asdl_c.py \ - -h $(srcdir)/Include/Python-ast.h.new \ - $(srcdir)/Parser/Python.asdl - $(UPDATE_FILE) $(srcdir)/Include/Python-ast.h $(srcdir)/Include/Python-ast.h.new - # Regenerate Python/Python-ast.c using Parser/asdl_c.py -c $(MKDIR_P) $(srcdir)/Python $(PYTHON_FOR_REGEN) $(srcdir)/Parser/asdl_c.py \ - -c $(srcdir)/Python/Python-ast.c.new \ - $(srcdir)/Parser/Python.asdl + $(srcdir)/Parser/Python.asdl \ + -H $(srcdir)/Include/Python-ast.h.new \ + -C $(srcdir)/Python/Python-ast.c.new + + $(UPDATE_FILE) $(srcdir)/Include/Python-ast.h $(srcdir)/Include/Python-ast.h.new $(UPDATE_FILE) $(srcdir)/Python/Python-ast.c $(srcdir)/Python/Python-ast.c.new .PHONY: regen-opcode diff --git a/PCbuild/regen.vcxproj b/PCbuild/regen.vcxproj index c97536f7dd96d..d46fb997dbd79 100644 --- a/PCbuild/regen.vcxproj +++ b/PCbuild/regen.vcxproj @@ -176,20 +176,15 @@ - - + + - - - - - - + - + diff --git a/Parser/asdl_c.py b/Parser/asdl_c.py index 59bf03ef8df3d..6d572755e68e8 100755 --- a/Parser/asdl_c.py +++ b/Parser/asdl_c.py @@ -1,12 +1,17 @@ #! /usr/bin/env python """Generate C code from an ASDL description.""" -import os, sys +import os +import sys + +from argparse import ArgumentParser +from pathlib import Path import asdl TABSIZE = 4 MAX_COL = 80 +AUTOGEN_MESSAGE = "/* File automatically generated by {}. */\n\n" def get_c_type(name): """Return a string for the C name of the type. 
@@ -1369,94 +1374,77 @@ def generate_module_def(f, mod): f.write(' return 1;\n') f.write('};\n\n') - -common_msg = "/* File automatically generated by %s. */\n\n" - -def main(srcfile, dump_module=False): - argv0 = sys.argv[0] - components = argv0.split(os.sep) - # Always join with '/' so different OS does not keep changing the file - argv0 = '/'.join(components[-2:]) - auto_gen_msg = common_msg % argv0 - mod = asdl.parse(srcfile) +def write_header(f, mod): + f.write('#ifndef Py_PYTHON_AST_H\n') + f.write('#define Py_PYTHON_AST_H\n') + f.write('#ifdef __cplusplus\n') + f.write('extern "C" {\n') + f.write('#endif\n') + f.write('\n') + f.write('#ifndef Py_LIMITED_API\n') + f.write('#include "asdl.h"\n') + f.write('\n') + f.write('#undef Yield /* undefine macro conflicting with */\n') + f.write('\n') + c = ChainOfVisitors(TypeDefVisitor(f), + StructVisitor(f)) + c.visit(mod) + f.write("// Note: these macros affect function definitions, not only call sites.\n") + PrototypeVisitor(f).visit(mod) + f.write("\n") + f.write("PyObject* PyAST_mod2obj(mod_ty t);\n") + f.write("mod_ty PyAST_obj2mod(PyObject* ast, PyArena* arena, int mode);\n") + f.write("int PyAST_Check(PyObject* obj);\n") + f.write("#endif /* !Py_LIMITED_API */\n") + f.write('\n') + f.write('#ifdef __cplusplus\n') + f.write('}\n') + f.write('#endif\n') + f.write('#endif /* !Py_PYTHON_AST_H */\n') + +def write_source(f, mod): + f.write('#include \n') + f.write('\n') + f.write('#include "Python.h"\n') + f.write('#include "%s-ast.h"\n' % mod.name) + f.write('#include "structmember.h" // PyMemberDef\n') + f.write('\n') + + generate_module_def(f, mod) + + v = ChainOfVisitors( + PyTypesDeclareVisitor(f), + PyTypesVisitor(f), + Obj2ModPrototypeVisitor(f), + FunctionVisitor(f), + ObjVisitor(f), + Obj2ModVisitor(f), + ASTModuleVisitor(f), + PartingShots(f), + ) + v.visit(mod) + +def main(input_file, c_file, h_file, dump_module=False): + auto_gen_msg = AUTOGEN_MESSAGE.format("/".join(Path(__file__).parts[-2:])) + mod = 
asdl.parse(input_file) if dump_module: print('Parsed Module:') print(mod) if not asdl.check(mod): sys.exit(1) - if H_FILE: - with open(H_FILE, "w") as f: - f.write(auto_gen_msg) - f.write('#ifndef Py_PYTHON_AST_H\n') - f.write('#define Py_PYTHON_AST_H\n') - f.write('#ifdef __cplusplus\n') - f.write('extern "C" {\n') - f.write('#endif\n') - f.write('\n') - f.write('#ifndef Py_LIMITED_API\n') - f.write('#include "asdl.h"\n') - f.write('\n') - f.write('#undef Yield /* undefine macro conflicting with */\n') - f.write('\n') - c = ChainOfVisitors(TypeDefVisitor(f), - StructVisitor(f)) - - c.visit(mod) - f.write("// Note: these macros affect function definitions, not only call sites.\n") - PrototypeVisitor(f).visit(mod) - f.write("\n") - f.write("PyObject* PyAST_mod2obj(mod_ty t);\n") - f.write("mod_ty PyAST_obj2mod(PyObject* ast, PyArena* arena, int mode);\n") - f.write("int PyAST_Check(PyObject* obj);\n") - f.write("#endif /* !Py_LIMITED_API */\n") - f.write('\n') - f.write('#ifdef __cplusplus\n') - f.write('}\n') - f.write('#endif\n') - f.write('#endif /* !Py_PYTHON_AST_H */\n') - - if C_FILE: - with open(C_FILE, "w") as f: - f.write(auto_gen_msg) - f.write('#include \n') - f.write('\n') - f.write('#include "Python.h"\n') - f.write('#include "%s-ast.h"\n' % mod.name) - f.write('#include "structmember.h" // PyMemberDef\n') - f.write('\n') - - generate_module_def(f, mod) - - v = ChainOfVisitors( - PyTypesDeclareVisitor(f), - PyTypesVisitor(f), - Obj2ModPrototypeVisitor(f), - FunctionVisitor(f), - ObjVisitor(f), - Obj2ModVisitor(f), - ASTModuleVisitor(f), - PartingShots(f), - ) - v.visit(mod) + for file, writer in (c_file, write_source), (h_file, write_header): + if file is not None: + with file.open("w") as f: + f.write(auto_gen_msg) + writer(f, mod) + print(file, "regenerated.") if __name__ == "__main__": - import getopt - - H_FILE = '' - C_FILE = '' - dump_module = False - opts, args = getopt.getopt(sys.argv[1:], "dh:c:") - for o, v in opts: - if o == '-h': - H_FILE = 
v - elif o == '-c': - C_FILE = v - elif o == '-d': - dump_module = True - if H_FILE and C_FILE: - print('Must specify exactly one output file') - sys.exit(1) - elif len(args) != 1: - print('Must specify single input file') - sys.exit(1) - main(args[0], dump_module) + parser = ArgumentParser() + parser.add_argument("input_file", type=Path) + parser.add_argument("-C", "--c-file", type=Path, default=None) + parser.add_argument("-H", "--h-file", type=Path, default=None) + parser.add_argument("-d", "--dump-module", action="store_true") + + options = parser.parse_args() + main(**vars(options)) From webhook-mailer at python.org Mon May 18 14:14:20 2020 From: webhook-mailer at python.org (Irit Katriel) Date: Mon, 18 May 2020 18:14:20 -0000 Subject: [Python-checkins] bpo-40662: Fixed ast.get_source_segment for ast nodes that have incomplete location information (GH-20157) Message-ID: https://github.com/python/cpython/commit/e6578a226d8a8a13d1062d154fad0fef28ee2416 commit: e6578a226d8a8a13d1062d154fad0fef28ee2416 branch: master author: Irit Katriel committer: GitHub date: 2020-05-18T19:14:12+01:00 summary: bpo-40662: Fixed ast.get_source_segment for ast nodes that have incomplete location information (GH-20157) Co-authored-by: Pablo Galindo files: A Misc/NEWS.d/next/Library/2020-05-18-12-56-45.bpo-40662.dfornR.rst M Lib/ast.py M Lib/test/test_ast.py diff --git a/Lib/ast.py b/Lib/ast.py index 61fbe030a7825..0d3b19d922368 100644 --- a/Lib/ast.py +++ b/Lib/ast.py @@ -332,6 +332,8 @@ def get_source_segment(source, node, *, padded=False): be padded with spaces to match its original position. 
""" try: + if node.end_lineno is None or node.end_col_offset is None: + return None lineno = node.lineno - 1 end_lineno = node.end_lineno - 1 col_offset = node.col_offset diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py index 6b71adac4e4a6..e55d10badc37e 100644 --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast.py @@ -1851,6 +1851,17 @@ class C: cdef = ast.parse(s).body[0] self.assertEqual(ast.get_source_segment(s, cdef.body[0], padded=True), s_method) + def test_source_segment_missing_info(self): + s = 'v = 1\r\nw = 1\nx = 1\n\ry = 1\r\n' + v, w, x, y = ast.parse(s).body + del v.lineno + del w.end_lineno + del x.col_offset + del y.end_col_offset + self.assertIsNone(ast.get_source_segment(s, v)) + self.assertIsNone(ast.get_source_segment(s, w)) + self.assertIsNone(ast.get_source_segment(s, x)) + self.assertIsNone(ast.get_source_segment(s, y)) class NodeVisitorTests(unittest.TestCase): def test_old_constant_nodes(self): diff --git a/Misc/NEWS.d/next/Library/2020-05-18-12-56-45.bpo-40662.dfornR.rst b/Misc/NEWS.d/next/Library/2020-05-18-12-56-45.bpo-40662.dfornR.rst new file mode 100644 index 0000000000000..a960c3f61b6bb --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-18-12-56-45.bpo-40662.dfornR.rst @@ -0,0 +1 @@ +Fixed :func:`ast.get_source_segment` for ast nodes that have incomplete location information. Patch by Irit Katriel. 
From webhook-mailer at python.org Mon May 18 14:23:58 2020 From: webhook-mailer at python.org (Batuhan Taskaya) Date: Mon, 18 May 2020 18:23:58 -0000 Subject: [Python-checkins] bpo-40663: Correctly handle annotations with subscripts in ast_unparse.c (GH-20156) Message-ID: https://github.com/python/cpython/commit/2135e10dc717c00d10d899d232bebfc59bb25032 commit: 2135e10dc717c00d10d899d232bebfc59bb25032 branch: master author: Batuhan Taskaya committer: GitHub date: 2020-05-18T19:23:48+01:00 summary: bpo-40663: Correctly handle annotations with subscripts in ast_unparse.c (GH-20156) files: A Misc/NEWS.d/next/Core and Builtins/2020-05-17-20-38-12.bpo-40663.u2aiZf.rst M Lib/test/test_future.py M Python/ast_unparse.c diff --git a/Lib/test/test_future.py b/Lib/test/test_future.py index 56b7ac6865559..0f40357b3a731 100644 --- a/Lib/test/test_future.py +++ b/Lib/test/test_future.py @@ -275,6 +275,9 @@ def test_annotations(self): eq("dict[str, int]") eq("set[str,]") eq("tuple[str, ...]") + eq("tuple[(str, *types)]") + eq("tuple[str, int, (str, int)]") + eq("tuple[(*int, str, str, (str, int))]") eq("tuple[str, int, float, dict[str, int]]") eq("slice[0]") eq("slice[0:1]") diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-17-20-38-12.bpo-40663.u2aiZf.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-17-20-38-12.bpo-40663.u2aiZf.rst new file mode 100644 index 0000000000000..5041abc7e3eaa --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-17-20-38-12.bpo-40663.u2aiZf.rst @@ -0,0 +1,2 @@ +Correctly generate annotations where parentheses are omitted but required +(e.g: ``Type[(str, int, *other))]``. 
diff --git a/Python/ast_unparse.c b/Python/ast_unparse.c index d1e9d42d33bd4..e699751a05a05 100644 --- a/Python/ast_unparse.c +++ b/Python/ast_unparse.c @@ -781,8 +781,19 @@ static int append_ast_subscript(_PyUnicodeWriter *writer, expr_ty e) { APPEND_EXPR(e->v.Subscript.value, PR_ATOM); + int level = PR_TUPLE; + expr_ty slice = e->v.Subscript.slice; + if (slice->kind == Tuple_kind) { + for (Py_ssize_t i = 0; i < asdl_seq_LEN(slice->v.Tuple.elts); i++) { + expr_ty element = asdl_seq_GET(slice->v.Tuple.elts, i); + if (element->kind == Starred_kind) { + ++level; + break; + } + } + } APPEND_STR("["); - APPEND_EXPR(e->v.Subscript.slice, PR_TUPLE); + APPEND_EXPR(e->v.Subscript.slice, level); APPEND_STR_FINISH("]"); } From webhook-mailer at python.org Mon May 18 14:27:49 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Mon, 18 May 2020 18:27:49 -0000 Subject: [Python-checkins] bpo-40669: Install PEG benchmarking dependencies in a venv (GH-20183) Message-ID: https://github.com/python/cpython/commit/dc31800f86fbcd40ee616984820b885d8adaa6a7 commit: dc31800f86fbcd40ee616984820b885d8adaa6a7 branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-05-18T11:27:40-07:00 summary: bpo-40669: Install PEG benchmarking dependencies in a venv (GH-20183) Create a `make venv` target, that creates a virtual environment and installs the dependency in that venv. `make time` and all the related targets are changed to use the virtual environment python. 
Automerge-Triggered-By: @pablogsal files: M Tools/peg_generator/.gitignore M Tools/peg_generator/Makefile M Tools/peg_generator/scripts/benchmark.py diff --git a/Tools/peg_generator/.gitignore b/Tools/peg_generator/.gitignore index 91c41f89e8cb5..f25e54195e4c9 100644 --- a/Tools/peg_generator/.gitignore +++ b/Tools/peg_generator/.gitignore @@ -1,3 +1,4 @@ peg_extension/parse.c data/xxl.py +venv/ @data diff --git a/Tools/peg_generator/Makefile b/Tools/peg_generator/Makefile index c1219b9263851..7b33a86b24a8b 100644 --- a/Tools/peg_generator/Makefile +++ b/Tools/peg_generator/Makefile @@ -5,7 +5,8 @@ endif ifeq ($(UNAME_S),Darwin) PYTHON ?= ../../python.exe endif - +VENVDIR ?= ./venv +VENVPYTHON ?= $(VENVDIR)/bin/python CPYTHON ?= ../../Lib MYPY ?= mypy @@ -27,6 +28,7 @@ peg_extension/parse.c: $(GRAMMAR) $(TOKENS) pegen/*.py peg_extension/peg_extensi clean: -rm -f peg_extension/*.o peg_extension/*.so peg_extension/parse.c -rm -f data/xxl.py + -rm -rf $(VENVDIR) dump: peg_extension/parse.c cat -n $(TESTFILE) @@ -41,6 +43,12 @@ regen-metaparser: pegen/metagrammar.gram pegen/*.py .PHONY: test +venv: + $(PYTHON) -m venv $(VENVDIR) + $(VENVPYTHON) -m pip install -U pip setuptools + $(VENVPYTHON) -m pip install -U memory_profiler + @echo "The venv has been created in the $(VENVDIR) directory" + test: run run: peg_extension/parse.c @@ -61,22 +69,22 @@ stats: peg_extension/parse.c data/xxl.py time: time_compile -time_compile: peg_extension/parse.c data/xxl.py - $(PYTHON) scripts/benchmark.py --parser=pegen --target=xxl compile +time_compile: venv peg_extension/parse.c data/xxl.py + $(VENVPYTHON) scripts/benchmark.py --parser=pegen --target=xxl compile -time_parse: peg_extension/parse.c data/xxl.py - $(PYTHON) scripts/benchmark.py --parser=pegen --target=xxl parse +time_parse: venv peg_extension/parse.c data/xxl.py + $(VENVPYTHON) scripts/benchmark.py --parser=pegen --target=xxl parse -time_check: peg_extension/parse.c data/xxl.py - $(PYTHON) scripts/benchmark.py 
--parser=pegen --target=xxl check +time_check: venv peg_extension/parse.c data/xxl.py + $(VENVPYTHON) scripts/benchmark.py --parser=pegen --target=xxl check time_stdlib: time_stdlib_compile -time_stdlib_compile: data/xxl.py - $(PYTHON) scripts/benchmark.py --parser=cpython --target=xxl compile +time_stdlib_compile: venv peg_extension/parse.c data/xxl.py + $(VENVPYTHON) scripts/benchmark.py --parser=cpython --target=xxl compile -time_stdlib_parse: data/xxl.py - $(PYTHON) scripts/benchmark.py --parser=cpython --target=xxl parse +time_stdlib_parse: venv peg_extension/parse.c data/xxl.py + $(VENVPYTHON) scripts/benchmark.py --parser=cpython --target=xxl parse test_local: $(PYTHON) scripts/test_parse_directory.py \ @@ -105,8 +113,8 @@ mypy: regen-metaparser format-python: black pegen scripts -bench: - $(PYTHON) scripts/benchmark.py --parser=pegen --target=stdlib check +bench: venv + $(VENVPYTHON) scripts/benchmark.py --parser=pegen --target=stdlib check format: format-python diff --git a/Tools/peg_generator/scripts/benchmark.py b/Tools/peg_generator/scripts/benchmark.py index d39ac3dca79e7..4942b99b6619f 100644 --- a/Tools/peg_generator/scripts/benchmark.py +++ b/Tools/peg_generator/scripts/benchmark.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python3.9 +#!/usr/bin/env python3 import argparse import ast @@ -6,7 +6,12 @@ import os from time import time -import memory_profiler +try: + import memory_profiler +except ModuleNotFoundError: + print("Please run `make venv` to create a virtual environment and install" + " all the dependencies, before running this script.") + sys.exit(1) sys.path.insert(0, os.getcwd()) from peg_extension import parse From webhook-mailer at python.org Mon May 18 14:41:48 2020 From: webhook-mailer at python.org (CyberSaxosTiGER) Date: Mon, 18 May 2020 18:41:48 -0000 Subject: [Python-checkins] bpo-38870: correctly escape unprintable characters on ast.unparse (GH-20166) Message-ID: 
https://github.com/python/cpython/commit/d71a6492dbd5434dfa6a0ad95e3ad98aa690887a commit: d71a6492dbd5434dfa6a0ad95e3ad98aa690887a branch: master author: CyberSaxosTiGER committer: GitHub date: 2020-05-18T19:41:35+01:00 summary: bpo-38870: correctly escape unprintable characters on ast.unparse (GH-20166) Unprintable characters such as `\x00` weren't correctly roundtripped due to not using default string repr when generating docstrings. This patch correctly encodes all unprintable characters (except `\n` and `\t`, which are commonly used for formatting, and found unescaped). Co-authored-by: Pablo Galindo Co-authored-by: Batuhan Taskaya files: M Lib/ast.py M Lib/test/test_unparse.py diff --git a/Lib/ast.py b/Lib/ast.py index 0d3b19d922368..2edb7171e9671 100644 --- a/Lib/ast.py +++ b/Lib/ast.py @@ -1090,6 +1090,15 @@ def visit_Name(self, node): self.write(node.id) def _write_docstring(self, node): + def esc_char(c): + if c in ("\n", "\t"): + # In the AST form, we don't know the author's intentation + # about how this should be displayed. 
We'll only escape + # \n and \t, because they are more likely to be unescaped + # in the source + return c + return c.encode('unicode_escape').decode('ascii') + self.fill() if node.kind == "u": self.write("u") @@ -1097,11 +1106,10 @@ def _write_docstring(self, node): value = node.value if value: # Preserve quotes in the docstring by escaping them - value = value.replace("\\", "\\\\") - value = value.replace('"""', '""\"') - value = value.replace("\r", "\\r") + value = "".join(map(esc_char, value)) if value[-1] == '"': value = value.replace('"', '\\"', -1) + value = value.replace('"""', '""\\"') self.write(f'"""{value}"""') diff --git a/Lib/test/test_unparse.py b/Lib/test/test_unparse.py index 67dcb1dae79ff..6d828721b7740 100644 --- a/Lib/test/test_unparse.py +++ b/Lib/test/test_unparse.py @@ -324,7 +324,11 @@ def test_docstrings(self): '\\t', '\n', '\\n', - '\r\\r\t\\t\n\\n' + '\r\\r\t\\t\n\\n', + '""">>> content = \"\"\"blabla\"\"\" <<<"""', + r'foo\n\x00', + '??????^\X\BB\N{LONG RIGHTWARDS SQUIGGLE ARROW}' + ) for docstring in docstrings: # check as Module docstrings for easy testing From webhook-mailer at python.org Mon May 18 15:14:54 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Mon, 18 May 2020 19:14:54 -0000 Subject: [Python-checkins] bpo-40334: Reproduce error message for type comments on bare '*' in the new parser (GH-20151) Message-ID: https://github.com/python/cpython/commit/75b863aa97016c6813709eb620c43295f84dd51f commit: 75b863aa97016c6813709eb620c43295f84dd51f branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-05-18T20:14:47+01:00 summary: bpo-40334: Reproduce error message for type comments on bare '*' in the new parser (GH-20151) files: M Grammar/python.gram M Lib/test/test_syntax.py M Parser/pegen/parse.c M Parser/pegen/pegen.c M Parser/pegen/pegen.h diff --git a/Grammar/python.gram b/Grammar/python.gram index cca9209054626..40e7818d49602 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -661,6 
+661,7 @@ invalid_parameters: RAISE_SYNTAX_ERROR("non-default argument follows default argument") } invalid_star_etc: | '*' (')' | ',' (')' | '**')) { RAISE_SYNTAX_ERROR("named arguments must follow bare *") } + | '*' ',' TYPE_COMMENT { RAISE_SYNTAX_ERROR("bare * has associated type comment") } invalid_lambda_star_etc: | '*' (':' | ',' (':' | '**')) { RAISE_SYNTAX_ERROR("named arguments must follow bare *") } invalid_double_type_comments: diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py index 87ceced6c62a0..a82b444b67a27 100644 --- a/Lib/test/test_syntax.py +++ b/Lib/test/test_syntax.py @@ -178,6 +178,16 @@ Traceback (most recent call last): SyntaxError: invalid syntax +>>> import ast; ast.parse(''' +... def f( +... *, # type: int +... a, # type: int +... ): +... pass +... ''', type_comments=True) +Traceback (most recent call last): +SyntaxError: bare * has associated type comment + From ast_for_funcdef(): diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index e9c20327c155a..fe95d274f37d2 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -12041,7 +12041,7 @@ invalid_parameters_rule(Parser *p) return _res; } -// invalid_star_etc: '*' (')' | ',' (')' | '**')) +// invalid_star_etc: '*' (')' | ',' (')' | '**')) | '*' ',' TYPE_COMMENT static void * invalid_star_etc_rule(Parser *p) { @@ -12071,6 +12071,27 @@ invalid_star_etc_rule(Parser *p) } p->mark = _mark; } + { // '*' ',' TYPE_COMMENT + Token * _literal; + Token * _literal_1; + Token * type_comment_var; + if ( + (_literal = _PyPegen_expect_token(p, 16)) // token='*' + && + (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' + && + (type_comment_var = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' + ) + { + _res = RAISE_SYNTAX_ERROR ( "bare * has associated type comment" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + return NULL; + } + goto done; + } + p->mark = _mark; + } _res = NULL; done: return _res; diff --git 
a/Parser/pegen/pegen.c b/Parser/pegen/pegen.c index 7f3e4561de605..ca4ea824b3f28 100644 --- a/Parser/pegen/pegen.c +++ b/Parser/pegen/pegen.c @@ -431,25 +431,6 @@ _PyPegen_raise_error_known_location(Parser *p, PyObject *errtype, return NULL; } -void *_PyPegen_arguments_parsing_error(Parser *p, expr_ty e) { - int kwarg_unpacking = 0; - for (Py_ssize_t i = 0, l = asdl_seq_LEN(e->v.Call.keywords); i < l; i++) { - keyword_ty keyword = asdl_seq_GET(e->v.Call.keywords, i); - if (!keyword->arg) { - kwarg_unpacking = 1; - } - } - - const char *msg = NULL; - if (kwarg_unpacking) { - msg = "positional argument follows keyword argument unpacking"; - } else { - msg = "positional argument follows keyword argument"; - } - - return RAISE_SYNTAX_ERROR(msg); -} - #if 0 static const char * token_name(int type) @@ -2099,4 +2080,23 @@ _PyPegen_get_invalid_target(expr_ty e) default: return e; } -} \ No newline at end of file +} + +void *_PyPegen_arguments_parsing_error(Parser *p, expr_ty e) { + int kwarg_unpacking = 0; + for (Py_ssize_t i = 0, l = asdl_seq_LEN(e->v.Call.keywords); i < l; i++) { + keyword_ty keyword = asdl_seq_GET(e->v.Call.keywords, i); + if (!keyword->arg) { + kwarg_unpacking = 1; + } + } + + const char *msg = NULL; + if (kwarg_unpacking) { + msg = "positional argument follows keyword argument unpacking"; + } else { + msg = "positional argument follows keyword argument"; + } + + return RAISE_SYNTAX_ERROR(msg); +} diff --git a/Parser/pegen/pegen.h b/Parser/pegen/pegen.h index b9d4c048bb52b..146804a896fd1 100644 --- a/Parser/pegen/pegen.h +++ b/Parser/pegen/pegen.h @@ -256,13 +256,13 @@ asdl_seq *_PyPegen_seq_extract_starred_exprs(Parser *, asdl_seq *); asdl_seq *_PyPegen_seq_delete_starred_exprs(Parser *, asdl_seq *); expr_ty _PyPegen_concatenate_strings(Parser *p, asdl_seq *); asdl_seq *_PyPegen_join_sequences(Parser *, asdl_seq *, asdl_seq *); -void *_PyPegen_arguments_parsing_error(Parser *, expr_ty); int _PyPegen_check_barry_as_flufl(Parser *); mod_ty 
_PyPegen_make_module(Parser *, asdl_seq *); // Error reporting helpers - expr_ty _PyPegen_get_invalid_target(expr_ty e); +void *_PyPegen_arguments_parsing_error(Parser *, expr_ty); + void *_PyPegen_parse(Parser *); From webhook-mailer at python.org Mon May 18 16:48:56 2020 From: webhook-mailer at python.org (Batuhan Taskaya) Date: Mon, 18 May 2020 20:48:56 -0000 Subject: [Python-checkins] bpo-38870: Don't omit parenthesis when unparsing a slice in ast.unparse Message-ID: https://github.com/python/cpython/commit/c102a148256b00b7d48c51a1a97df19042e603de commit: c102a148256b00b7d48c51a1a97df19042e603de branch: master author: Batuhan Taskaya committer: GitHub date: 2020-05-18T21:48:49+01:00 summary: bpo-38870: Don't omit parenthesis when unparsing a slice in ast.unparse When unparsing a non-empty tuple, the parentheses can be safely omitted if there aren't any elements that explicitly require them (such as starred expressions). files: M Lib/ast.py M Lib/test/test_unparse.py diff --git a/Lib/ast.py b/Lib/ast.py index 2edb7171e9671..52e51b4858774 100644 --- a/Lib/ast.py +++ b/Lib/ast.py @@ -1356,10 +1356,20 @@ def visit_Call(self, node): self.traverse(e) def visit_Subscript(self, node): + def is_simple_tuple(slice_value): + # when unparsing a non-empty tuple, the parantheses can be safely + # omitted if there aren't any elements that explicitly requires + # parantheses (such as starred expressions). 
+ return ( + isinstance(slice_value, Tuple) + and slice_value.elts + and not any(isinstance(elt, Starred) for elt in slice_value.elts) + ) + self.set_precedence(_Precedence.ATOM, node.value) self.traverse(node.value) with self.delimit("[", "]"): - if isinstance(node.slice, Tuple) and node.slice.elts: + if is_simple_tuple(node.slice): self.items_view(self.traverse, node.slice.elts) else: self.traverse(node.slice) diff --git a/Lib/test/test_unparse.py b/Lib/test/test_unparse.py index 6d828721b7740..bb725ced64db8 100644 --- a/Lib/test/test_unparse.py +++ b/Lib/test/test_unparse.py @@ -279,10 +279,13 @@ def test_dict_unpacking_in_dict(self): self.check_ast_roundtrip(r"""{**{'y': 2}, 'x': 1}""") self.check_ast_roundtrip(r"""{**{'y': 2}, **{'x': 1}}""") - def test_ext_slices(self): + def test_slices(self): self.check_ast_roundtrip("a[i]") self.check_ast_roundtrip("a[i,]") self.check_ast_roundtrip("a[i, j]") + self.check_ast_roundtrip("a[(*a,)]") + self.check_ast_roundtrip("a[(a:=b)]") + self.check_ast_roundtrip("a[(a:=b,c)]") self.check_ast_roundtrip("a[()]") self.check_ast_roundtrip("a[i:j]") self.check_ast_roundtrip("a[:j]") @@ -470,6 +473,11 @@ def test_unary_op_factor(self): for prefix in ("not",): self.check_src_roundtrip(f"{prefix} 1") + def test_slices(self): + self.check_src_roundtrip("a[1]") + self.check_src_roundtrip("a[1, 2]") + self.check_src_roundtrip("a[(1, *a)]") + class DirectoryTestCase(ASTTestCase): """Test roundtrip behaviour on all files in Lib and Lib/test.""" From webhook-mailer at python.org Mon May 18 17:48:01 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Mon, 18 May 2020 21:48:01 -0000 Subject: [Python-checkins] Regenerate the parser (#20195) Message-ID: https://github.com/python/cpython/commit/ced4e5c22756fde3e97327642c159f6699815caa commit: ced4e5c22756fde3e97327642c159f6699815caa branch: master author: Pablo Galindo committer: GitHub date: 2020-05-18T23:47:51+02:00 summary: Regenerate the parser (#20195) files: M 
Parser/pegen/parse.c diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index fe95d274f37d2..286f72111e351 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -12072,6 +12072,9 @@ invalid_star_etc_rule(Parser *p) p->mark = _mark; } { // '*' ',' TYPE_COMMENT + if (p->error_indicator) { + return NULL; + } Token * _literal; Token * _literal_1; Token * type_comment_var; From webhook-mailer at python.org Mon May 18 18:03:05 2020 From: webhook-mailer at python.org (Hai Shi) Date: Mon, 18 May 2020 22:03:05 -0000 Subject: [Python-checkins] bpo-40275: More lazy imports in test.support (GH-20131) Message-ID: https://github.com/python/cpython/commit/a3ec3ad9e20e7d9ed148d4cfbd22aebec608b42a commit: a3ec3ad9e20e7d9ed148d4cfbd22aebec608b42a branch: master author: Hai Shi committer: GitHub date: 2020-05-19T00:02:57+02:00 summary: bpo-40275: More lazy imports in test.support (GH-20131) Make the the following imports lazy in test.support: * bz2 * gzip * lzma * resource * zlib The following test.support decorators now need to be called with parenthesis: * @support.requires_bz2 * @support.requires_gzip * @support.requires_lzma * @support.requires_zlib For example, "@requires_zlib" becomes "@requires_zlib()". 
files: M Lib/test/support/__init__.py M Lib/test/test_importlib/test_zip.py M Lib/test/test_logging.py M Lib/test/test_shutil.py M Lib/test/test_tarfile.py M Lib/test/test_venv.py M Lib/test/test_zipapp.py M Lib/test/test_zipfile.py M Lib/test/test_zipfile64.py M Lib/test/test_zipimport.py diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index a9f9908c7fac9..8dee5b9dcc7ab 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -28,31 +28,6 @@ from .testresult import get_test_runner -try: - import zlib -except ImportError: - zlib = None - -try: - import gzip -except ImportError: - gzip = None - -try: - import bz2 -except ImportError: - bz2 = None - -try: - import lzma -except ImportError: - lzma = None - -try: - import resource -except ImportError: - resource = None - __all__ = [ # globals "PIPE_MAX_SIZE", "verbose", "max_memuse", "use_resources", "failfast", @@ -705,13 +680,33 @@ def dec(*args, **kwargs): float.__getformat__("double").startswith("IEEE"), "test requires IEEE 754 doubles") -requires_zlib = unittest.skipUnless(zlib, 'requires zlib') +def requires_zlib(reason='requires zlib'): + try: + import zlib + except ImportError: + zlib = None + return unittest.skipUnless(zlib, reason) -requires_gzip = unittest.skipUnless(gzip, 'requires gzip') +def requires_gzip(reason='requires gzip'): + try: + import gzip + except ImportError: + gzip = None + return unittest.skipUnless(gzip, reason) -requires_bz2 = unittest.skipUnless(bz2, 'requires bz2') +def requires_bz2(reason='requires bz2'): + try: + import bz2 + except ImportError: + bz2 = None + return unittest.skipUnless(bz2, reason) -requires_lzma = unittest.skipUnless(lzma, 'requires lzma') +def requires_lzma(reason='requires lzma'): + try: + import lzma + except ImportError: + lzma = None + return unittest.skipUnless(lzma, reason) is_jython = sys.platform.startswith('java') @@ -1062,6 +1057,10 @@ def check_syntax_warning(testcase, statement, errtext='', *, lineno=1, 
offset=No def open_urlresource(url, *args, **kw): import urllib.request, urllib.parse + try: + import gzip + except ImportError: + gzip = None check = kw.pop('check', None) @@ -2579,11 +2578,16 @@ def __enter__(self): self.old_modes[report_type] = old_mode, old_file else: - if resource is not None: + try: + import resource + self.resource = resource + except ImportError: + self.resource = None + if self.resource is not None: try: - self.old_value = resource.getrlimit(resource.RLIMIT_CORE) - resource.setrlimit(resource.RLIMIT_CORE, - (0, self.old_value[1])) + self.old_value = self.resource.getrlimit(self.resource.RLIMIT_CORE) + self.resource.setrlimit(self.resource.RLIMIT_CORE, + (0, self.old_value[1])) except (ValueError, OSError): pass @@ -2621,9 +2625,9 @@ def __exit__(self, *ignore_exc): msvcrt.CrtSetReportMode(report_type, old_mode) msvcrt.CrtSetReportFile(report_type, old_file) else: - if resource is not None: + if self.resource is not None: try: - resource.setrlimit(resource.RLIMIT_CORE, self.old_value) + self.resource.setrlimit(self.resource.RLIMIT_CORE, self.old_value) except (ValueError, OSError): pass diff --git a/Lib/test/test_importlib/test_zip.py b/Lib/test/test_importlib/test_zip.py index 9466ca4a5f424..fa87cd7cb1096 100644 --- a/Lib/test/test_importlib/test_zip.py +++ b/Lib/test/test_importlib/test_zip.py @@ -10,7 +10,7 @@ from test.support import requires_zlib - at requires_zlib + at requires_zlib() class TestZip(unittest.TestCase): root = 'test.test_importlib.data' @@ -50,7 +50,7 @@ def test_files(self): assert '.whl/' in path, path - at requires_zlib + at requires_zlib() class TestEgg(TestZip): def setUp(self): # Find the path to the example-*.egg so we can add it to the front of diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py index 4cc45f7107115..9a114451913e8 100644 --- a/Lib/test/test_logging.py +++ b/Lib/test/test_logging.py @@ -5076,7 +5076,7 @@ def rotator(self, source, dest): self.assertFalse(os.path.exists(rh.namer(self.fn 
+ ".1"))) rh.close() - @support.requires_zlib + @support.requires_zlib() def test_rotator(self): def namer(name): return name + ".gz" diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py index b9fdfd1350a09..e56b337083c8f 100644 --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -1246,7 +1246,7 @@ class TestArchives(BaseTest, unittest.TestCase): ### shutil.make_archive - @support.requires_zlib + @support.requires_zlib() def test_make_tarball(self): # creating something to tar root_dir, base_dir = self._create_files('') @@ -1302,7 +1302,7 @@ def _create_files(self, base_dir='dist'): write_file((root_dir, 'outer'), 'xxx') return root_dir, base_dir - @support.requires_zlib + @support.requires_zlib() @unittest.skipUnless(shutil.which('tar'), 'Need the tar command to run') def test_tarfile_vs_tar(self): @@ -1335,7 +1335,7 @@ def test_tarfile_vs_tar(self): self.assertEqual(tarball, base_name + '.tar') self.assertTrue(os.path.isfile(tarball)) - @support.requires_zlib + @support.requires_zlib() def test_make_zipfile(self): # creating something to zip root_dir, base_dir = self._create_files() @@ -1372,7 +1372,7 @@ def test_make_zipfile(self): ['dist/', 'dist/sub/', 'dist/sub2/', 'dist/file1', 'dist/file2', 'dist/sub/file3']) - @support.requires_zlib + @support.requires_zlib() @unittest.skipUnless(shutil.which('zip'), 'Need the zip command to run') def test_zipfile_vs_zip(self): @@ -1398,7 +1398,7 @@ def test_zipfile_vs_zip(self): names2 = zf.namelist() self.assertEqual(sorted(names), sorted(names2)) - @support.requires_zlib + @support.requires_zlib() @unittest.skipUnless(shutil.which('unzip'), 'Need the unzip command to run') def test_unzip_zipfile(self): @@ -1427,7 +1427,7 @@ def test_make_archive(self): base_name = os.path.join(tmpdir, 'archive') self.assertRaises(ValueError, make_archive, base_name, 'xxx') - @support.requires_zlib + @support.requires_zlib() def test_make_archive_owner_group(self): # testing make_archive with owner and group, with 
various combinations # this works even if there's not gid/uid support @@ -1455,7 +1455,7 @@ def test_make_archive_owner_group(self): self.assertTrue(os.path.isfile(res)) - @support.requires_zlib + @support.requires_zlib() @unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support") def test_tarfile_root_owner(self): root_dir, base_dir = self._create_files() @@ -1500,7 +1500,7 @@ def test_make_tarfile_in_curdir(self): self.assertEqual(make_archive('test', 'tar'), 'test.tar') self.assertTrue(os.path.isfile('test.tar')) - @support.requires_zlib + @support.requires_zlib() def test_make_zipfile_in_curdir(self): # Issue #21280 root_dir = self.mkdtemp() @@ -1555,20 +1555,20 @@ def check_unpack_archive_with_converter(self, format, converter): def test_unpack_archive_tar(self): self.check_unpack_archive('tar') - @support.requires_zlib + @support.requires_zlib() def test_unpack_archive_gztar(self): self.check_unpack_archive('gztar') - @support.requires_bz2 + @support.requires_bz2() def test_unpack_archive_bztar(self): self.check_unpack_archive('bztar') - @support.requires_lzma + @support.requires_lzma() @unittest.skipIf(AIX and not _maxdataOK(), "AIX MAXDATA must be 0x20000000 or larger") def test_unpack_archive_xztar(self): self.check_unpack_archive('xztar') - @support.requires_zlib + @support.requires_zlib() def test_unpack_archive_zip(self): self.check_unpack_archive('zip') diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py index 25e9e93604476..be66f1f89e6f3 100644 --- a/Lib/test/test_tarfile.py +++ b/Lib/test/test_tarfile.py @@ -57,21 +57,21 @@ class TarTest: def mode(self): return self.prefix + self.suffix - at support.requires_gzip + at support.requires_gzip() class GzipTest: tarname = gzipname suffix = 'gz' open = gzip.GzipFile if gzip else None taropen = tarfile.TarFile.gzopen - at support.requires_bz2 + at support.requires_bz2() class Bz2Test: tarname = bz2name suffix = 'bz2' open = bz2.BZ2File if bz2 else None taropen = 
tarfile.TarFile.bz2open - at support.requires_lzma + at support.requires_lzma() class LzmaTest: tarname = xzname suffix = 'xz' diff --git a/Lib/test/test_venv.py b/Lib/test/test_venv.py index b2794cd992a06..44c62193bf7cc 100644 --- a/Lib/test/test_venv.py +++ b/Lib/test/test_venv.py @@ -530,7 +530,7 @@ def do_test_with_pip(self, system_site_packages): # Issue #26610: pip/pep425tags.py requires ctypes @unittest.skipUnless(ctypes, 'pip requires ctypes') - @requires_zlib + @requires_zlib() def test_with_pip(self): self.do_test_with_pip(False) self.do_test_with_pip(True) diff --git a/Lib/test/test_zipapp.py b/Lib/test/test_zipapp.py index 73bddc79c17ab..69f2e55d56384 100644 --- a/Lib/test/test_zipapp.py +++ b/Lib/test/test_zipapp.py @@ -101,7 +101,7 @@ def test_create_archive_default_target(self): expected_target = self.tmpdir / 'source.pyz' self.assertTrue(expected_target.is_file()) - @requires_zlib + @requires_zlib() def test_create_archive_with_compression(self): # Test packing a directory into a compressed archive. 
source = self.tmpdir / 'source' diff --git a/Lib/test/test_zipfile.py b/Lib/test/test_zipfile.py index 29d98c8092d30..c9ca1ddaafe19 100644 --- a/Lib/test/test_zipfile.py +++ b/Lib/test/test_zipfile.py @@ -640,7 +640,7 @@ def test_add_file_after_2107(self): self.assertEqual(zinfo.date_time, (2107, 12, 31, 23, 59, 59)) - at requires_zlib + at requires_zlib() class DeflateTestsWithSourceFile(AbstractTestsWithSourceFile, unittest.TestCase): compression = zipfile.ZIP_DEFLATED @@ -656,12 +656,12 @@ def test_per_file_compression(self): self.assertEqual(sinfo.compress_type, zipfile.ZIP_STORED) self.assertEqual(dinfo.compress_type, zipfile.ZIP_DEFLATED) - at requires_bz2 + at requires_bz2() class Bzip2TestsWithSourceFile(AbstractTestsWithSourceFile, unittest.TestCase): compression = zipfile.ZIP_BZIP2 - at requires_lzma + at requires_lzma() class LzmaTestsWithSourceFile(AbstractTestsWithSourceFile, unittest.TestCase): compression = zipfile.ZIP_LZMA @@ -1075,17 +1075,17 @@ def test_generated_valid_zip64_extra(self): self.assertEqual(zf.read(zinfo), expected_content) - at requires_zlib + at requires_zlib() class DeflateTestZip64InSmallFiles(AbstractTestZip64InSmallFiles, unittest.TestCase): compression = zipfile.ZIP_DEFLATED - at requires_bz2 + at requires_bz2() class Bzip2TestZip64InSmallFiles(AbstractTestZip64InSmallFiles, unittest.TestCase): compression = zipfile.ZIP_BZIP2 - at requires_lzma + at requires_lzma() class LzmaTestZip64InSmallFiles(AbstractTestZip64InSmallFiles, unittest.TestCase): compression = zipfile.ZIP_LZMA @@ -1120,15 +1120,15 @@ def test_write_after_close(self): class StoredWriterTests(AbstractWriterTests, unittest.TestCase): compression = zipfile.ZIP_STORED - at requires_zlib + at requires_zlib() class DeflateWriterTests(AbstractWriterTests, unittest.TestCase): compression = zipfile.ZIP_DEFLATED - at requires_bz2 + at requires_bz2() class Bzip2WriterTests(AbstractWriterTests, unittest.TestCase): compression = zipfile.ZIP_BZIP2 - at requires_lzma + at 
requires_lzma() class LzmaWriterTests(AbstractWriterTests, unittest.TestCase): compression = zipfile.ZIP_LZMA @@ -1582,7 +1582,7 @@ def test_unsupported_version(self): self.assertRaises(NotImplementedError, zipfile.ZipFile, io.BytesIO(data), 'r') - @requires_zlib + @requires_zlib() def test_read_unicode_filenames(self): # bug #10801 fname = findfile('zip_cp437_header.zip') @@ -2018,7 +2018,7 @@ def test_seek_tell(self): fp.seek(0, os.SEEK_SET) self.assertEqual(fp.tell(), 0) - @requires_bz2 + @requires_bz2() def test_decompress_without_3rd_party_library(self): data = b'PK\x05\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' zip_file = io.BytesIO(data) @@ -2076,7 +2076,7 @@ class StoredBadCrcTests(AbstractBadCrcTests, unittest.TestCase): b'lePK\005\006\0\0\0\0\001\0\001\0003\000' b'\0\0/\0\0\0\0\0') - at requires_zlib + at requires_zlib() class DeflateBadCrcTests(AbstractBadCrcTests, unittest.TestCase): compression = zipfile.ZIP_DEFLATED zip_with_bad_crc = ( @@ -2089,7 +2089,7 @@ class DeflateBadCrcTests(AbstractBadCrcTests, unittest.TestCase): b'\x00afilePK\x05\x06\x00\x00\x00\x00\x01\x00' b'\x01\x003\x00\x00\x003\x00\x00\x00\x00\x00') - at requires_bz2 + at requires_bz2() class Bzip2BadCrcTests(AbstractBadCrcTests, unittest.TestCase): compression = zipfile.ZIP_BZIP2 zip_with_bad_crc = ( @@ -2105,7 +2105,7 @@ class Bzip2BadCrcTests(AbstractBadCrcTests, unittest.TestCase): b'\x05\x06\x00\x00\x00\x00\x01\x00\x01\x003\x00\x00\x00[\x00' b'\x00\x00\x00\x00') - at requires_lzma + at requires_lzma() class LzmaBadCrcTests(AbstractBadCrcTests, unittest.TestCase): compression = zipfile.ZIP_LZMA zip_with_bad_crc = ( @@ -2172,7 +2172,7 @@ def test_bad_password(self): self.zip2.setpassword(b"perl") self.assertRaises(RuntimeError, self.zip2.read, "zero") - @requires_zlib + @requires_zlib() def test_good_password(self): self.zip.setpassword(b"python") self.assertEqual(self.zip.read("test.txt"), self.plain) @@ -2318,17 +2318,17 @@ class 
StoredTestsWithRandomBinaryFiles(AbstractTestsWithRandomBinaryFiles, unittest.TestCase): compression = zipfile.ZIP_STORED - at requires_zlib + at requires_zlib() class DeflateTestsWithRandomBinaryFiles(AbstractTestsWithRandomBinaryFiles, unittest.TestCase): compression = zipfile.ZIP_DEFLATED - at requires_bz2 + at requires_bz2() class Bzip2TestsWithRandomBinaryFiles(AbstractTestsWithRandomBinaryFiles, unittest.TestCase): compression = zipfile.ZIP_BZIP2 - at requires_lzma + at requires_lzma() class LzmaTestsWithRandomBinaryFiles(AbstractTestsWithRandomBinaryFiles, unittest.TestCase): compression = zipfile.ZIP_LZMA @@ -2416,7 +2416,7 @@ def test_open_write(self): self.assertEqual(zipf.read('twos'), b'222') - at requires_zlib + at requires_zlib() class TestsWithMultipleOpens(unittest.TestCase): @classmethod def setUpClass(cls): @@ -2682,7 +2682,7 @@ def test_list_command(self): PYTHONIOENCODING='ascii:backslashreplace') self.assertEqual(out, expected) - @requires_zlib + @requires_zlib() def test_create_command(self): self.addCleanup(unlink, TESTFN) with open(TESTFN, 'w') as f: diff --git a/Lib/test/test_zipfile64.py b/Lib/test/test_zipfile64.py index 56746bc08f63d..3a788de221264 100644 --- a/Lib/test/test_zipfile64.py +++ b/Lib/test/test_zipfile64.py @@ -73,7 +73,7 @@ def testStored(self): self.assertFalse(f.closed) self.zipTest(TESTFN2, zipfile.ZIP_STORED) - @requires_zlib + @requires_zlib() def testDeflated(self): # Try the temp file first. If we do TESTFN2 first, then it hogs # gigabytes of disk space for the duration of the test. 
diff --git a/Lib/test/test_zipimport.py b/Lib/test/test_zipimport.py index 2af8689c1d2cb..560286071c690 100644 --- a/Lib/test/test_zipimport.py +++ b/Lib/test/test_zipimport.py @@ -683,7 +683,7 @@ def testLargestPossibleComment(self): self.doTest(".py", files, TESTMOD, comment=b"c" * ((1 << 16) - 1)) - at support.requires_zlib + at support.requires_zlib() class CompressedZipImportTestCase(UncompressedZipImportTestCase): compression = ZIP_DEFLATED From webhook-mailer at python.org Mon May 18 18:24:45 2020 From: webhook-mailer at python.org (Steve Dower) Date: Mon, 18 May 2020 22:24:45 -0000 Subject: [Python-checkins] bpo-35890: Use RegQueryInfoKeyW and CryptAcquireContextW explicitly (GH-19974) Message-ID: https://github.com/python/cpython/commit/930badd414dd2376b1875e1775cb40855a87e180 commit: 930badd414dd2376b1875e1775cb40855a87e180 branch: 3.7 author: Steve Dower committer: GitHub date: 2020-05-18T23:24:41+01:00 summary: bpo-35890: Use RegQueryInfoKeyW and CryptAcquireContextW explicitly (GH-19974) Co-authored-by: Minmin Gong files: M PC/getpathp.c M PC/winreg.c M Python/bootstrap_hash.c diff --git a/PC/getpathp.c b/PC/getpathp.c index e3cd3ae1e9a60..dc4e43fe7d3b6 100644 --- a/PC/getpathp.c +++ b/PC/getpathp.c @@ -398,7 +398,7 @@ getpythonregpath(HKEY keyBase, int skipcore) goto done; } /* Find out how big our core buffer is, and how many subkeys we have */ - rc = RegQueryInfoKey(newKey, NULL, NULL, NULL, &numKeys, NULL, NULL, + rc = RegQueryInfoKeyW(newKey, NULL, NULL, NULL, &numKeys, NULL, NULL, NULL, NULL, &dataSize, NULL, NULL); if (rc!=ERROR_SUCCESS) { goto done; diff --git a/PC/winreg.c b/PC/winreg.c index 16a10c7232a17..639052d20dd08 100644 --- a/PC/winreg.c +++ b/PC/winreg.c @@ -1382,8 +1382,8 @@ winreg_QueryInfoKey_impl(PyObject *module, HKEY key) PyObject *l; PyObject *ret; - if ((rc = RegQueryInfoKey(key, NULL, NULL, 0, &nSubKeys, NULL, NULL, - &nValues, NULL, NULL, NULL, &ft)) + if ((rc = RegQueryInfoKeyW(key, NULL, NULL, 0, &nSubKeys, NULL, NULL, + 
&nValues, NULL, NULL, NULL, &ft)) != ERROR_SUCCESS) return PyErr_SetFromWindowsErrWithFunction(rc, "RegQueryInfoKey"); li.LowPart = ft.dwLowDateTime; diff --git a/Python/bootstrap_hash.c b/Python/bootstrap_hash.c index 58b0802f46688..7b560e66e3071 100644 --- a/Python/bootstrap_hash.c +++ b/Python/bootstrap_hash.c @@ -37,8 +37,8 @@ static int win32_urandom_init(int raise) { /* Acquire context */ - if (!CryptAcquireContext(&hCryptProv, NULL, NULL, - PROV_RSA_FULL, CRYPT_VERIFYCONTEXT)) + if (!CryptAcquireContextW(&hCryptProv, NULL, NULL, + PROV_RSA_FULL, CRYPT_VERIFYCONTEXT)) goto error; return 0; From webhook-mailer at python.org Mon May 18 18:36:18 2020 From: webhook-mailer at python.org (Chris Jerdonek) Date: Mon, 18 May 2020 22:36:18 -0000 Subject: [Python-checkins] Use _PyErr_ChainStackItem() inside gen_send_ex(). (GH-20173) Message-ID: https://github.com/python/cpython/commit/ff7a8b03c49153021d6de5d0b2fa8b5163059ed6 commit: ff7a8b03c49153021d6de5d0b2fa8b5163059ed6 branch: master author: Chris Jerdonek committer: GitHub date: 2020-05-18T15:36:09-07:00 summary: Use _PyErr_ChainStackItem() inside gen_send_ex(). (GH-20173) _PyErr_ChainStackItem was just added in GH-19951 (for bpo-31033). 
files: M Objects/genobject.c diff --git a/Objects/genobject.c b/Objects/genobject.c index 40179cdbf7dbd..271720bdf8b4c 100644 --- a/Objects/genobject.c +++ b/Objects/genobject.c @@ -203,16 +203,8 @@ gen_send_ex(PyGenObject *gen, PyObject *arg, int exc, int closing) assert(f->f_back == NULL); f->f_back = tstate->frame; - _PyErr_StackItem *gi_exc_state = &gen->gi_exc_state; - if (exc && gi_exc_state->exc_type != NULL && - gi_exc_state->exc_type != Py_None) - { - Py_INCREF(gi_exc_state->exc_type); - Py_XINCREF(gi_exc_state->exc_value); - Py_XINCREF(gi_exc_state->exc_traceback); - _PyErr_ChainExceptions(gi_exc_state->exc_type, - gi_exc_state->exc_value, - gi_exc_state->exc_traceback); + if (exc) { + _PyErr_ChainStackItem(&gen->gi_exc_state); } gen->gi_running = 1; From webhook-mailer at python.org Mon May 18 18:37:14 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Mon, 18 May 2020 22:37:14 -0000 Subject: [Python-checkins] bpo-40669: Use requirements.pip when installing PEG dependencies (GH-20194) Message-ID: https://github.com/python/cpython/commit/3764069f3ba2a7e932837ae19265059339dc86e3 commit: 3764069f3ba2a7e932837ae19265059339dc86e3 branch: master author: Pablo Galindo committer: GitHub date: 2020-05-18T23:37:06+01:00 summary: bpo-40669: Use requirements.pip when installing PEG dependencies (GH-20194) files: M Tools/peg_generator/Makefile diff --git a/Tools/peg_generator/Makefile b/Tools/peg_generator/Makefile index 7b33a86b24a8b..34763b543c23b 100644 --- a/Tools/peg_generator/Makefile +++ b/Tools/peg_generator/Makefile @@ -46,7 +46,7 @@ regen-metaparser: pegen/metagrammar.gram pegen/*.py venv: $(PYTHON) -m venv $(VENVDIR) $(VENVPYTHON) -m pip install -U pip setuptools - $(VENVPYTHON) -m pip install -U memory_profiler + $(VENVPYTHON) -m pip install -r requirements.pip @echo "The venv has been created in the $(VENVDIR) directory" test: run From webhook-mailer at python.org Mon May 18 22:03:02 2020 From: webhook-mailer at python.org (Anthony Sottile) 
Date: Tue, 19 May 2020 02:03:02 -0000 Subject: [Python-checkins] Fix code-block in zoneinfo (GH-20201) Message-ID: https://github.com/python/cpython/commit/9095f76613c8dd67beb21992def837bb8a37ed25 commit: 9095f76613c8dd67beb21992def837bb8a37ed25 branch: master author: Anthony Sottile committer: GitHub date: 2020-05-18T19:02:54-07:00 summary: Fix code-block in zoneinfo (GH-20201) ``` Warning, treated as error: /tmp/code/Doc/library/zoneinfo.rst:303:Error in "code-block" directive: 1 argument(s) required, 0 supplied. .. code-block:: >>> a = ZoneInfo("Europe/Berlin") >>> b = pickle.loads(europe_berlin_pkl) >>> a is b True ``` files: M Doc/library/zoneinfo.rst diff --git a/Doc/library/zoneinfo.rst b/Doc/library/zoneinfo.rst index 1b6f2e7bd15f0..3a4c12a73acd7 100644 --- a/Doc/library/zoneinfo.rst +++ b/Doc/library/zoneinfo.rst @@ -300,7 +300,7 @@ The behavior of a ``ZoneInfo`` file depends on how it was constructed: constructed from ``ZoneInfo("Europe/Berlin")``, one would expect the following behavior: - .. code-block:: + .. code-block:: pycon >>> a = ZoneInfo("Europe/Berlin") >>> b = pickle.loads(europe_berlin_pkl) @@ -314,7 +314,7 @@ The behavior of a ``ZoneInfo`` file depends on how it was constructed: constructed from ``ZoneInfo.no_cache("Europe/Berlin")``, one would expect the following behavior: - .. code-block:: + .. 
code-block:: pycon >>> a = ZoneInfo("Europe/Berlin") >>> b = pickle.loads(europe_berlin_pkl_nc) From webhook-mailer at python.org Mon May 18 22:33:09 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Tue, 19 May 2020 02:33:09 -0000 Subject: [Python-checkins] Python 3.10.0a0 (GH-20198) Message-ID: https://github.com/python/cpython/commit/d4fe098d1ee4f4056ac65cae55f5ffdd439dede0 commit: d4fe098d1ee4f4056ac65cae55f5ffdd439dede0 branch: master author: Pablo Galindo committer: GitHub date: 2020-05-19T03:33:01+01:00 summary: Python 3.10.0a0 (GH-20198) files: A Doc/whatsnew/3.10.rst M Doc/tutorial/interpreter.rst M Doc/tutorial/stdlib.rst M Doc/tutorial/stdlib2.rst M Doc/whatsnew/index.rst M Include/patchlevel.h M Modules/getpath.c M PC/pyconfig.h M PC/python3.def M PCbuild/readme.txt M README.rst M configure M configure.ac diff --git a/Doc/tutorial/interpreter.rst b/Doc/tutorial/interpreter.rst index b78d2960ac543..d2733a9968fb1 100644 --- a/Doc/tutorial/interpreter.rst +++ b/Doc/tutorial/interpreter.rst @@ -10,13 +10,13 @@ Using the Python Interpreter Invoking the Interpreter ======================== -The Python interpreter is usually installed as :file:`/usr/local/bin/python3.9` +The Python interpreter is usually installed as :file:`/usr/local/bin/python3.10` on those machines where it is available; putting :file:`/usr/local/bin` in your Unix shell's search path makes it possible to start it by typing the command: .. code-block:: text - python3.9 + python3.10 to the shell. [#]_ Since the choice of the directory where the interpreter lives is an installation option, other places are possible; check with your local @@ -24,7 +24,7 @@ Python guru or system administrator. (E.g., :file:`/usr/local/python` is a popular alternative location.) On Windows machines where you have installed Python from the :ref:`Microsoft Store -`, the :file:`python3.9` command will be available. If you have +`, the :file:`python3.10` command will be available. 
If you have the :ref:`py.exe launcher ` installed, you can use the :file:`py` command. See :ref:`setting-envvars` for other ways to launch Python. @@ -97,8 +97,8 @@ before printing the first prompt: .. code-block:: shell-session - $ python3.9 - Python 3.9 (default, June 4 2019, 09:25:04) + $ python3.10 + Python 3.10 (default, June 4 2019, 09:25:04) [GCC 4.8.2] on linux Type "help", "copyright", "credits" or "license" for more information. >>> diff --git a/Doc/tutorial/stdlib.rst b/Doc/tutorial/stdlib.rst index a52653b94a325..f33265cd2b0eb 100644 --- a/Doc/tutorial/stdlib.rst +++ b/Doc/tutorial/stdlib.rst @@ -15,7 +15,7 @@ operating system:: >>> import os >>> os.getcwd() # Return the current working directory - 'C:\\Python39' + 'C:\\Python310' >>> os.chdir('/server/accesslogs') # Change current working directory >>> os.system('mkdir today') # Run the command mkdir in the system shell 0 diff --git a/Doc/tutorial/stdlib2.rst b/Doc/tutorial/stdlib2.rst index 299482856ff32..298034d3b4844 100644 --- a/Doc/tutorial/stdlib2.rst +++ b/Doc/tutorial/stdlib2.rst @@ -278,7 +278,7 @@ applications include caching objects that are expensive to create:: Traceback (most recent call last): File "", line 1, in d['primary'] # entry was automatically removed - File "C:/python39/lib/weakref.py", line 46, in __getitem__ + File "C:/python310/lib/weakref.py", line 46, in __getitem__ o = self.data[key]() KeyError: 'primary' diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst new file mode 100644 index 0000000000000..96cb132811109 --- /dev/null +++ b/Doc/whatsnew/3.10.rst @@ -0,0 +1,115 @@ +**************************** + What's New In Python 3.10 +**************************** + +:Release: |release| +:Date: |today| + +.. Rules for maintenance: + + * Anyone can add text to this document. Do not spend very much time + on the wording of your changes, because your text will probably + get rewritten to some degree. 
+ + * The maintainer will go through Misc/NEWS periodically and add + changes; it's therefore more important to add your changes to + Misc/NEWS than to this file. + + * This is not a complete list of every single change; completeness + is the purpose of Misc/NEWS. Some changes I consider too small + or esoteric to include. If such a change is added to the text, + I'll just remove it. (This is another reason you shouldn't spend + too much time on writing your addition.) + + * If you want to draw your new text to the attention of the + maintainer, add 'XXX' to the beginning of the paragraph or + section. + + * It's OK to just add a fragmentary note about a change. For + example: "XXX Describe the transmogrify() function added to the + socket module." The maintainer will research the change and + write the necessary text. + + * You can comment out your additions if you like, but it's not + necessary (especially when a final release is some months away). + + * Credit the author of a patch or bugfix. Just the name is + sufficient; the e-mail address isn't necessary. + + * It's helpful to add the bug/patch number as a comment: + + XXX Describe the transmogrify() function added to the socket + module. + (Contributed by P.Y. Developer in :issue:`12345`.) + + This saves the maintainer the effort of going through the Mercurial log + when researching a change. + +This article explains the new features in Python 3.10, compared to 3.9. + +For full details, see the :source:`Misc/NEWS` file. + +.. note:: + + Prerelease users should be aware that this document is currently in draft + form. It will be updated substantially as Python 3.10 moves towards release, + so it's worth checking back even after reading earlier versions. + + +Summary -- Release highlights +============================= + +.. This section singles out the most important changes in Python 3.10. + Brevity is key. + + +.. PEP-sized items next. 
+ + + +New Features +============ + + + +Other Language Changes +====================== + + + +New Modules +=========== + +* None yet. + + +Improved Modules +================ + + +Optimizations +============= + + +Build and C API Changes +======================= + + + +Deprecated +========== + + + +Removed +======= + + + +Porting to Python 3.10 +====================== + +This section lists previously described changes and other bugfixes +that may require changes to your code. + + diff --git a/Doc/whatsnew/index.rst b/Doc/whatsnew/index.rst index 954e38bc6f1e3..a1ff8ec2889c9 100644 --- a/Doc/whatsnew/index.rst +++ b/Doc/whatsnew/index.rst @@ -11,6 +11,7 @@ anyone wishing to stay up-to-date after a new release. .. toctree:: :maxdepth: 2 + 3.10.rst 3.9.rst 3.8.rst 3.7.rst diff --git a/Include/patchlevel.h b/Include/patchlevel.h index 3cbd3db76b2d8..8578b6597f602 100644 --- a/Include/patchlevel.h +++ b/Include/patchlevel.h @@ -17,13 +17,13 @@ /* Version parsed out into numeric values */ /*--start constants--*/ #define PY_MAJOR_VERSION 3 -#define PY_MINOR_VERSION 9 +#define PY_MINOR_VERSION 10 #define PY_MICRO_VERSION 0 #define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_ALPHA -#define PY_RELEASE_SERIAL 6 +#define PY_RELEASE_SERIAL 0 /* Version as a string */ -#define PY_VERSION "3.9.0a6+" +#define PY_VERSION "3.10.0a0" /*--end constants--*/ /* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2. 
diff --git a/Modules/getpath.c b/Modules/getpath.c index 94e06b3e3e86b..91cc449218c4a 100644 --- a/Modules/getpath.c +++ b/Modules/getpath.c @@ -1297,7 +1297,7 @@ calculate_zip_path(PyCalculatePath *calculate) PyStatus res; /* Path: / "python00.zip" */ - wchar_t *path = joinpath2(calculate->platlibdir_macro, L"python00.zip"); + wchar_t *path = joinpath2(calculate->platlibdir_macro, L"python000.zip"); if (path == NULL) { return _PyStatus_NO_MEMORY(); } @@ -1305,7 +1305,7 @@ calculate_zip_path(PyCalculatePath *calculate) if (calculate->prefix_found > 0) { /* Use the reduced prefix returned by Py_GetPrefix() - Path: / / "python00.zip" */ + Path: / / "python000.zip" */ wchar_t *parent = _PyMem_RawWcsdup(calculate->prefix); if (parent == NULL) { res = _PyStatus_NO_MEMORY(); @@ -1325,10 +1325,11 @@ calculate_zip_path(PyCalculatePath *calculate) goto done; } - /* Replace "00" with version */ + /* Replace "000" with the version */ size_t len = wcslen(calculate->zip_path); - calculate->zip_path[len - 6] = VERSION[0]; - calculate->zip_path[len - 5] = VERSION[2]; + calculate->zip_path[len - 7] = VERSION[0]; + calculate->zip_path[len - 6] = VERSION[2]; + calculate->zip_path[len - 5] = VERSION[3]; res = _PyStatus_OK(); diff --git a/PC/pyconfig.h b/PC/pyconfig.h index 02216b5068012..b29f63c35bccb 100644 --- a/PC/pyconfig.h +++ b/PC/pyconfig.h @@ -269,11 +269,11 @@ Py_NO_ENABLE_SHARED to find out. Also support MS_NO_COREDLL for b/w compat */ file in their Makefile (other compilers are generally taken care of by distutils.) 
*/ # if defined(_DEBUG) -# pragma comment(lib,"python39_d.lib") +# pragma comment(lib,"python310_d.lib") # elif defined(Py_LIMITED_API) # pragma comment(lib,"python3.lib") # else -# pragma comment(lib,"python39.lib") +# pragma comment(lib,"python310.lib") # endif /* _DEBUG */ # endif /* _MSC_VER */ # endif /* Py_BUILD_CORE */ diff --git a/PC/python3.def b/PC/python3.def index 1521ac738c0b3..6d54d4eaf71f0 100644 --- a/PC/python3.def +++ b/PC/python3.def @@ -2,802 +2,802 @@ ; It is used when building python3dll.vcxproj LIBRARY "python3" EXPORTS - PyArg_Parse=python39.PyArg_Parse - PyArg_ParseTuple=python39.PyArg_ParseTuple - PyArg_ParseTupleAndKeywords=python39.PyArg_ParseTupleAndKeywords - PyArg_UnpackTuple=python39.PyArg_UnpackTuple - PyArg_VaParse=python39.PyArg_VaParse - PyArg_VaParseTupleAndKeywords=python39.PyArg_VaParseTupleAndKeywords - PyArg_ValidateKeywordArguments=python39.PyArg_ValidateKeywordArguments - PyBaseObject_Type=python39.PyBaseObject_Type DATA - PyBool_FromLong=python39.PyBool_FromLong - PyBool_Type=python39.PyBool_Type DATA - PyByteArrayIter_Type=python39.PyByteArrayIter_Type DATA - PyByteArray_AsString=python39.PyByteArray_AsString - PyByteArray_Concat=python39.PyByteArray_Concat - PyByteArray_FromObject=python39.PyByteArray_FromObject - PyByteArray_FromStringAndSize=python39.PyByteArray_FromStringAndSize - PyByteArray_Resize=python39.PyByteArray_Resize - PyByteArray_Size=python39.PyByteArray_Size - PyByteArray_Type=python39.PyByteArray_Type DATA - PyBytesIter_Type=python39.PyBytesIter_Type DATA - PyBytes_AsString=python39.PyBytes_AsString - PyBytes_AsStringAndSize=python39.PyBytes_AsStringAndSize - PyBytes_Concat=python39.PyBytes_Concat - PyBytes_ConcatAndDel=python39.PyBytes_ConcatAndDel - PyBytes_DecodeEscape=python39.PyBytes_DecodeEscape - PyBytes_FromFormat=python39.PyBytes_FromFormat - PyBytes_FromFormatV=python39.PyBytes_FromFormatV - PyBytes_FromObject=python39.PyBytes_FromObject - PyBytes_FromString=python39.PyBytes_FromString - 
PyBytes_FromStringAndSize=python39.PyBytes_FromStringAndSize - PyBytes_Repr=python39.PyBytes_Repr - PyBytes_Size=python39.PyBytes_Size - PyBytes_Type=python39.PyBytes_Type DATA - PyCFunction_Call=python39.PyCFunction_Call - PyCFunction_GetFlags=python39.PyCFunction_GetFlags - PyCFunction_GetFunction=python39.PyCFunction_GetFunction - PyCFunction_GetSelf=python39.PyCFunction_GetSelf - PyCFunction_New=python39.PyCFunction_New - PyCFunction_NewEx=python39.PyCFunction_NewEx - PyCFunction_Type=python39.PyCFunction_Type DATA - PyCallIter_New=python39.PyCallIter_New - PyCallIter_Type=python39.PyCallIter_Type DATA - PyCallable_Check=python39.PyCallable_Check - PyCapsule_GetContext=python39.PyCapsule_GetContext - PyCapsule_GetDestructor=python39.PyCapsule_GetDestructor - PyCapsule_GetName=python39.PyCapsule_GetName - PyCapsule_GetPointer=python39.PyCapsule_GetPointer - PyCapsule_Import=python39.PyCapsule_Import - PyCapsule_IsValid=python39.PyCapsule_IsValid - PyCapsule_New=python39.PyCapsule_New - PyCapsule_SetContext=python39.PyCapsule_SetContext - PyCapsule_SetDestructor=python39.PyCapsule_SetDestructor - PyCapsule_SetName=python39.PyCapsule_SetName - PyCapsule_SetPointer=python39.PyCapsule_SetPointer - PyCapsule_Type=python39.PyCapsule_Type DATA - PyClassMethodDescr_Type=python39.PyClassMethodDescr_Type DATA - PyCodec_BackslashReplaceErrors=python39.PyCodec_BackslashReplaceErrors - PyCodec_Decode=python39.PyCodec_Decode - PyCodec_Decoder=python39.PyCodec_Decoder - PyCodec_Encode=python39.PyCodec_Encode - PyCodec_Encoder=python39.PyCodec_Encoder - PyCodec_IgnoreErrors=python39.PyCodec_IgnoreErrors - PyCodec_IncrementalDecoder=python39.PyCodec_IncrementalDecoder - PyCodec_IncrementalEncoder=python39.PyCodec_IncrementalEncoder - PyCodec_KnownEncoding=python39.PyCodec_KnownEncoding - PyCodec_LookupError=python39.PyCodec_LookupError - PyCodec_NameReplaceErrors=python39.PyCodec_NameReplaceErrors - PyCodec_Register=python39.PyCodec_Register - 
PyCodec_RegisterError=python39.PyCodec_RegisterError - PyCodec_ReplaceErrors=python39.PyCodec_ReplaceErrors - PyCodec_StreamReader=python39.PyCodec_StreamReader - PyCodec_StreamWriter=python39.PyCodec_StreamWriter - PyCodec_StrictErrors=python39.PyCodec_StrictErrors - PyCodec_XMLCharRefReplaceErrors=python39.PyCodec_XMLCharRefReplaceErrors - PyComplex_FromDoubles=python39.PyComplex_FromDoubles - PyComplex_ImagAsDouble=python39.PyComplex_ImagAsDouble - PyComplex_RealAsDouble=python39.PyComplex_RealAsDouble - PyComplex_Type=python39.PyComplex_Type DATA - PyDescr_NewClassMethod=python39.PyDescr_NewClassMethod - PyDescr_NewGetSet=python39.PyDescr_NewGetSet - PyDescr_NewMember=python39.PyDescr_NewMember - PyDescr_NewMethod=python39.PyDescr_NewMethod - PyDictItems_Type=python39.PyDictItems_Type DATA - PyDictIterItem_Type=python39.PyDictIterItem_Type DATA - PyDictIterKey_Type=python39.PyDictIterKey_Type DATA - PyDictIterValue_Type=python39.PyDictIterValue_Type DATA - PyDictKeys_Type=python39.PyDictKeys_Type DATA - PyDictProxy_New=python39.PyDictProxy_New - PyDictProxy_Type=python39.PyDictProxy_Type DATA - PyDictValues_Type=python39.PyDictValues_Type DATA - PyDict_Clear=python39.PyDict_Clear - PyDict_Contains=python39.PyDict_Contains - PyDict_Copy=python39.PyDict_Copy - PyDict_DelItem=python39.PyDict_DelItem - PyDict_DelItemString=python39.PyDict_DelItemString - PyDict_GetItem=python39.PyDict_GetItem - PyDict_GetItemString=python39.PyDict_GetItemString - PyDict_GetItemWithError=python39.PyDict_GetItemWithError - PyDict_Items=python39.PyDict_Items - PyDict_Keys=python39.PyDict_Keys - PyDict_Merge=python39.PyDict_Merge - PyDict_MergeFromSeq2=python39.PyDict_MergeFromSeq2 - PyDict_New=python39.PyDict_New - PyDict_Next=python39.PyDict_Next - PyDict_SetItem=python39.PyDict_SetItem - PyDict_SetItemString=python39.PyDict_SetItemString - PyDict_Size=python39.PyDict_Size - PyDict_Type=python39.PyDict_Type DATA - PyDict_Update=python39.PyDict_Update - 
PyDict_Values=python39.PyDict_Values - PyEllipsis_Type=python39.PyEllipsis_Type DATA - PyEnum_Type=python39.PyEnum_Type DATA - PyErr_BadArgument=python39.PyErr_BadArgument - PyErr_BadInternalCall=python39.PyErr_BadInternalCall - PyErr_CheckSignals=python39.PyErr_CheckSignals - PyErr_Clear=python39.PyErr_Clear - PyErr_Display=python39.PyErr_Display - PyErr_ExceptionMatches=python39.PyErr_ExceptionMatches - PyErr_Fetch=python39.PyErr_Fetch - PyErr_Format=python39.PyErr_Format - PyErr_FormatV=python39.PyErr_FormatV - PyErr_GetExcInfo=python39.PyErr_GetExcInfo - PyErr_GivenExceptionMatches=python39.PyErr_GivenExceptionMatches - PyErr_NewException=python39.PyErr_NewException - PyErr_NewExceptionWithDoc=python39.PyErr_NewExceptionWithDoc - PyErr_NoMemory=python39.PyErr_NoMemory - PyErr_NormalizeException=python39.PyErr_NormalizeException - PyErr_Occurred=python39.PyErr_Occurred - PyErr_Print=python39.PyErr_Print - PyErr_PrintEx=python39.PyErr_PrintEx - PyErr_ProgramText=python39.PyErr_ProgramText - PyErr_ResourceWarning=python39.PyErr_ResourceWarning - PyErr_Restore=python39.PyErr_Restore - PyErr_SetExcFromWindowsErr=python39.PyErr_SetExcFromWindowsErr - PyErr_SetExcFromWindowsErrWithFilename=python39.PyErr_SetExcFromWindowsErrWithFilename - PyErr_SetExcFromWindowsErrWithFilenameObject=python39.PyErr_SetExcFromWindowsErrWithFilenameObject - PyErr_SetExcFromWindowsErrWithFilenameObjects=python39.PyErr_SetExcFromWindowsErrWithFilenameObjects - PyErr_SetExcInfo=python39.PyErr_SetExcInfo - PyErr_SetFromErrno=python39.PyErr_SetFromErrno - PyErr_SetFromErrnoWithFilename=python39.PyErr_SetFromErrnoWithFilename - PyErr_SetFromErrnoWithFilenameObject=python39.PyErr_SetFromErrnoWithFilenameObject - PyErr_SetFromErrnoWithFilenameObjects=python39.PyErr_SetFromErrnoWithFilenameObjects - PyErr_SetFromWindowsErr=python39.PyErr_SetFromWindowsErr - PyErr_SetFromWindowsErrWithFilename=python39.PyErr_SetFromWindowsErrWithFilename - PyErr_SetImportError=python39.PyErr_SetImportError - 
PyErr_SetImportErrorSubclass=python39.PyErr_SetImportErrorSubclass - PyErr_SetInterrupt=python39.PyErr_SetInterrupt - PyErr_SetNone=python39.PyErr_SetNone - PyErr_SetObject=python39.PyErr_SetObject - PyErr_SetString=python39.PyErr_SetString - PyErr_SyntaxLocation=python39.PyErr_SyntaxLocation - PyErr_SyntaxLocationEx=python39.PyErr_SyntaxLocationEx - PyErr_WarnEx=python39.PyErr_WarnEx - PyErr_WarnExplicit=python39.PyErr_WarnExplicit - PyErr_WarnFormat=python39.PyErr_WarnFormat - PyErr_WriteUnraisable=python39.PyErr_WriteUnraisable - PyEval_AcquireLock=python39.PyEval_AcquireLock - PyEval_AcquireThread=python39.PyEval_AcquireThread - PyEval_CallFunction=python39.PyEval_CallFunction - PyEval_CallMethod=python39.PyEval_CallMethod - PyEval_CallObjectWithKeywords=python39.PyEval_CallObjectWithKeywords - PyEval_EvalCode=python39.PyEval_EvalCode - PyEval_EvalCodeEx=python39.PyEval_EvalCodeEx - PyEval_EvalFrame=python39.PyEval_EvalFrame - PyEval_EvalFrameEx=python39.PyEval_EvalFrameEx - PyEval_GetBuiltins=python39.PyEval_GetBuiltins - PyEval_GetCallStats=python39.PyEval_GetCallStats - PyEval_GetFrame=python39.PyEval_GetFrame - PyEval_GetFuncDesc=python39.PyEval_GetFuncDesc - PyEval_GetFuncName=python39.PyEval_GetFuncName - PyEval_GetGlobals=python39.PyEval_GetGlobals - PyEval_GetLocals=python39.PyEval_GetLocals - PyEval_InitThreads=python39.PyEval_InitThreads - PyEval_ReInitThreads=python39.PyEval_ReInitThreads - PyEval_ReleaseLock=python39.PyEval_ReleaseLock - PyEval_ReleaseThread=python39.PyEval_ReleaseThread - PyEval_RestoreThread=python39.PyEval_RestoreThread - PyEval_SaveThread=python39.PyEval_SaveThread - PyEval_ThreadsInitialized=python39.PyEval_ThreadsInitialized - PyExc_ArithmeticError=python39.PyExc_ArithmeticError DATA - PyExc_AssertionError=python39.PyExc_AssertionError DATA - PyExc_AttributeError=python39.PyExc_AttributeError DATA - PyExc_BaseException=python39.PyExc_BaseException DATA - PyExc_BlockingIOError=python39.PyExc_BlockingIOError DATA - 
PyExc_BrokenPipeError=python39.PyExc_BrokenPipeError DATA - PyExc_BufferError=python39.PyExc_BufferError DATA - PyExc_BytesWarning=python39.PyExc_BytesWarning DATA - PyExc_ChildProcessError=python39.PyExc_ChildProcessError DATA - PyExc_ConnectionAbortedError=python39.PyExc_ConnectionAbortedError DATA - PyExc_ConnectionError=python39.PyExc_ConnectionError DATA - PyExc_ConnectionRefusedError=python39.PyExc_ConnectionRefusedError DATA - PyExc_ConnectionResetError=python39.PyExc_ConnectionResetError DATA - PyExc_DeprecationWarning=python39.PyExc_DeprecationWarning DATA - PyExc_EOFError=python39.PyExc_EOFError DATA - PyExc_EnvironmentError=python39.PyExc_EnvironmentError DATA - PyExc_Exception=python39.PyExc_Exception DATA - PyExc_FileExistsError=python39.PyExc_FileExistsError DATA - PyExc_FileNotFoundError=python39.PyExc_FileNotFoundError DATA - PyExc_FloatingPointError=python39.PyExc_FloatingPointError DATA - PyExc_FutureWarning=python39.PyExc_FutureWarning DATA - PyExc_GeneratorExit=python39.PyExc_GeneratorExit DATA - PyExc_IOError=python39.PyExc_IOError DATA - PyExc_ImportError=python39.PyExc_ImportError DATA - PyExc_ImportWarning=python39.PyExc_ImportWarning DATA - PyExc_IndentationError=python39.PyExc_IndentationError DATA - PyExc_IndexError=python39.PyExc_IndexError DATA - PyExc_InterruptedError=python39.PyExc_InterruptedError DATA - PyExc_IsADirectoryError=python39.PyExc_IsADirectoryError DATA - PyExc_KeyError=python39.PyExc_KeyError DATA - PyExc_KeyboardInterrupt=python39.PyExc_KeyboardInterrupt DATA - PyExc_LookupError=python39.PyExc_LookupError DATA - PyExc_MemoryError=python39.PyExc_MemoryError DATA - PyExc_ModuleNotFoundError=python39.PyExc_ModuleNotFoundError DATA - PyExc_NameError=python39.PyExc_NameError DATA - PyExc_NotADirectoryError=python39.PyExc_NotADirectoryError DATA - PyExc_NotImplementedError=python39.PyExc_NotImplementedError DATA - PyExc_OSError=python39.PyExc_OSError DATA - PyExc_OverflowError=python39.PyExc_OverflowError DATA - 
PyExc_PendingDeprecationWarning=python39.PyExc_PendingDeprecationWarning DATA - PyExc_PermissionError=python39.PyExc_PermissionError DATA - PyExc_ProcessLookupError=python39.PyExc_ProcessLookupError DATA - PyExc_RecursionError=python39.PyExc_RecursionError DATA - PyExc_ReferenceError=python39.PyExc_ReferenceError DATA - PyExc_ResourceWarning=python39.PyExc_ResourceWarning DATA - PyExc_RuntimeError=python39.PyExc_RuntimeError DATA - PyExc_RuntimeWarning=python39.PyExc_RuntimeWarning DATA - PyExc_StopAsyncIteration=python39.PyExc_StopAsyncIteration DATA - PyExc_StopIteration=python39.PyExc_StopIteration DATA - PyExc_SyntaxError=python39.PyExc_SyntaxError DATA - PyExc_SyntaxWarning=python39.PyExc_SyntaxWarning DATA - PyExc_SystemError=python39.PyExc_SystemError DATA - PyExc_SystemExit=python39.PyExc_SystemExit DATA - PyExc_TabError=python39.PyExc_TabError DATA - PyExc_TimeoutError=python39.PyExc_TimeoutError DATA - PyExc_TypeError=python39.PyExc_TypeError DATA - PyExc_UnboundLocalError=python39.PyExc_UnboundLocalError DATA - PyExc_UnicodeDecodeError=python39.PyExc_UnicodeDecodeError DATA - PyExc_UnicodeEncodeError=python39.PyExc_UnicodeEncodeError DATA - PyExc_UnicodeError=python39.PyExc_UnicodeError DATA - PyExc_UnicodeTranslateError=python39.PyExc_UnicodeTranslateError DATA - PyExc_UnicodeWarning=python39.PyExc_UnicodeWarning DATA - PyExc_UserWarning=python39.PyExc_UserWarning DATA - PyExc_ValueError=python39.PyExc_ValueError DATA - PyExc_Warning=python39.PyExc_Warning DATA - PyExc_WindowsError=python39.PyExc_WindowsError DATA - PyExc_ZeroDivisionError=python39.PyExc_ZeroDivisionError DATA - PyExceptionClass_Name=python39.PyExceptionClass_Name - PyException_GetCause=python39.PyException_GetCause - PyException_GetContext=python39.PyException_GetContext - PyException_GetTraceback=python39.PyException_GetTraceback - PyException_SetCause=python39.PyException_SetCause - PyException_SetContext=python39.PyException_SetContext - 
PyException_SetTraceback=python39.PyException_SetTraceback - PyFile_FromFd=python39.PyFile_FromFd - PyFile_GetLine=python39.PyFile_GetLine - PyFile_WriteObject=python39.PyFile_WriteObject - PyFile_WriteString=python39.PyFile_WriteString - PyFilter_Type=python39.PyFilter_Type DATA - PyFloat_AsDouble=python39.PyFloat_AsDouble - PyFloat_FromDouble=python39.PyFloat_FromDouble - PyFloat_FromString=python39.PyFloat_FromString - PyFloat_GetInfo=python39.PyFloat_GetInfo - PyFloat_GetMax=python39.PyFloat_GetMax - PyFloat_GetMin=python39.PyFloat_GetMin - PyFloat_Type=python39.PyFloat_Type DATA - PyFrozenSet_New=python39.PyFrozenSet_New - PyFrozenSet_Type=python39.PyFrozenSet_Type DATA - PyGC_Collect=python39.PyGC_Collect - PyGILState_Ensure=python39.PyGILState_Ensure - PyGILState_GetThisThreadState=python39.PyGILState_GetThisThreadState - PyGILState_Release=python39.PyGILState_Release - PyGetSetDescr_Type=python39.PyGetSetDescr_Type DATA - PyImport_AddModule=python39.PyImport_AddModule - PyImport_AddModuleObject=python39.PyImport_AddModuleObject - PyImport_AppendInittab=python39.PyImport_AppendInittab - PyImport_Cleanup=python39.PyImport_Cleanup - PyImport_ExecCodeModule=python39.PyImport_ExecCodeModule - PyImport_ExecCodeModuleEx=python39.PyImport_ExecCodeModuleEx - PyImport_ExecCodeModuleObject=python39.PyImport_ExecCodeModuleObject - PyImport_ExecCodeModuleWithPathnames=python39.PyImport_ExecCodeModuleWithPathnames - PyImport_GetImporter=python39.PyImport_GetImporter - PyImport_GetMagicNumber=python39.PyImport_GetMagicNumber - PyImport_GetMagicTag=python39.PyImport_GetMagicTag - PyImport_GetModule=python39.PyImport_GetModule - PyImport_GetModuleDict=python39.PyImport_GetModuleDict - PyImport_Import=python39.PyImport_Import - PyImport_ImportFrozenModule=python39.PyImport_ImportFrozenModule - PyImport_ImportFrozenModuleObject=python39.PyImport_ImportFrozenModuleObject - PyImport_ImportModule=python39.PyImport_ImportModule - 
PyImport_ImportModuleLevel=python39.PyImport_ImportModuleLevel - PyImport_ImportModuleLevelObject=python39.PyImport_ImportModuleLevelObject - PyImport_ImportModuleNoBlock=python39.PyImport_ImportModuleNoBlock - PyImport_ReloadModule=python39.PyImport_ReloadModule - PyIndex_Check=python39.PyIndex_Check - PyInterpreterState_Clear=python39.PyInterpreterState_Clear - PyInterpreterState_Delete=python39.PyInterpreterState_Delete - PyInterpreterState_New=python39.PyInterpreterState_New - PyIter_Check=python39.PyIter_Check - PyIter_Next=python39.PyIter_Next - PyListIter_Type=python39.PyListIter_Type DATA - PyListRevIter_Type=python39.PyListRevIter_Type DATA - PyList_Append=python39.PyList_Append - PyList_AsTuple=python39.PyList_AsTuple - PyList_GetItem=python39.PyList_GetItem - PyList_GetSlice=python39.PyList_GetSlice - PyList_Insert=python39.PyList_Insert - PyList_New=python39.PyList_New - PyList_Reverse=python39.PyList_Reverse - PyList_SetItem=python39.PyList_SetItem - PyList_SetSlice=python39.PyList_SetSlice - PyList_Size=python39.PyList_Size - PyList_Sort=python39.PyList_Sort - PyList_Type=python39.PyList_Type DATA - PyLongRangeIter_Type=python39.PyLongRangeIter_Type DATA - PyLong_AsDouble=python39.PyLong_AsDouble - PyLong_AsLong=python39.PyLong_AsLong - PyLong_AsLongAndOverflow=python39.PyLong_AsLongAndOverflow - PyLong_AsLongLong=python39.PyLong_AsLongLong - PyLong_AsLongLongAndOverflow=python39.PyLong_AsLongLongAndOverflow - PyLong_AsSize_t=python39.PyLong_AsSize_t - PyLong_AsSsize_t=python39.PyLong_AsSsize_t - PyLong_AsUnsignedLong=python39.PyLong_AsUnsignedLong - PyLong_AsUnsignedLongLong=python39.PyLong_AsUnsignedLongLong - PyLong_AsUnsignedLongLongMask=python39.PyLong_AsUnsignedLongLongMask - PyLong_AsUnsignedLongMask=python39.PyLong_AsUnsignedLongMask - PyLong_AsVoidPtr=python39.PyLong_AsVoidPtr - PyLong_FromDouble=python39.PyLong_FromDouble - PyLong_FromLong=python39.PyLong_FromLong - PyLong_FromLongLong=python39.PyLong_FromLongLong - 
PyLong_FromSize_t=python39.PyLong_FromSize_t - PyLong_FromSsize_t=python39.PyLong_FromSsize_t - PyLong_FromString=python39.PyLong_FromString - PyLong_FromUnsignedLong=python39.PyLong_FromUnsignedLong - PyLong_FromUnsignedLongLong=python39.PyLong_FromUnsignedLongLong - PyLong_FromVoidPtr=python39.PyLong_FromVoidPtr - PyLong_GetInfo=python39.PyLong_GetInfo - PyLong_Type=python39.PyLong_Type DATA - PyMap_Type=python39.PyMap_Type DATA - PyMapping_Check=python39.PyMapping_Check - PyMapping_GetItemString=python39.PyMapping_GetItemString - PyMapping_HasKey=python39.PyMapping_HasKey - PyMapping_HasKeyString=python39.PyMapping_HasKeyString - PyMapping_Items=python39.PyMapping_Items - PyMapping_Keys=python39.PyMapping_Keys - PyMapping_Length=python39.PyMapping_Length - PyMapping_SetItemString=python39.PyMapping_SetItemString - PyMapping_Size=python39.PyMapping_Size - PyMapping_Values=python39.PyMapping_Values - PyMem_Calloc=python39.PyMem_Calloc - PyMem_Free=python39.PyMem_Free - PyMem_Malloc=python39.PyMem_Malloc - PyMem_Realloc=python39.PyMem_Realloc - PyMemberDescr_Type=python39.PyMemberDescr_Type DATA - PyMemoryView_FromMemory=python39.PyMemoryView_FromMemory - PyMemoryView_FromObject=python39.PyMemoryView_FromObject - PyMemoryView_GetContiguous=python39.PyMemoryView_GetContiguous - PyMemoryView_Type=python39.PyMemoryView_Type DATA - PyMethodDescr_Type=python39.PyMethodDescr_Type DATA - PyModuleDef_Init=python39.PyModuleDef_Init - PyModuleDef_Type=python39.PyModuleDef_Type DATA - PyModule_AddFunctions=python39.PyModule_AddFunctions - PyModule_AddIntConstant=python39.PyModule_AddIntConstant - PyModule_AddObject=python39.PyModule_AddObject - PyModule_AddStringConstant=python39.PyModule_AddStringConstant - PyModule_Create2=python39.PyModule_Create2 - PyModule_ExecDef=python39.PyModule_ExecDef - PyModule_FromDefAndSpec2=python39.PyModule_FromDefAndSpec2 - PyModule_GetDef=python39.PyModule_GetDef - PyModule_GetDict=python39.PyModule_GetDict - 
PyModule_GetFilename=python39.PyModule_GetFilename - PyModule_GetFilenameObject=python39.PyModule_GetFilenameObject - PyModule_GetName=python39.PyModule_GetName - PyModule_GetNameObject=python39.PyModule_GetNameObject - PyModule_GetState=python39.PyModule_GetState - PyModule_New=python39.PyModule_New - PyModule_NewObject=python39.PyModule_NewObject - PyModule_SetDocString=python39.PyModule_SetDocString - PyModule_Type=python39.PyModule_Type DATA - PyNullImporter_Type=python39.PyNullImporter_Type DATA - PyNumber_Absolute=python39.PyNumber_Absolute - PyNumber_Add=python39.PyNumber_Add - PyNumber_And=python39.PyNumber_And - PyNumber_AsSsize_t=python39.PyNumber_AsSsize_t - PyNumber_Check=python39.PyNumber_Check - PyNumber_Divmod=python39.PyNumber_Divmod - PyNumber_Float=python39.PyNumber_Float - PyNumber_FloorDivide=python39.PyNumber_FloorDivide - PyNumber_InPlaceAdd=python39.PyNumber_InPlaceAdd - PyNumber_InPlaceAnd=python39.PyNumber_InPlaceAnd - PyNumber_InPlaceFloorDivide=python39.PyNumber_InPlaceFloorDivide - PyNumber_InPlaceLshift=python39.PyNumber_InPlaceLshift - PyNumber_InPlaceMatrixMultiply=python39.PyNumber_InPlaceMatrixMultiply - PyNumber_InPlaceMultiply=python39.PyNumber_InPlaceMultiply - PyNumber_InPlaceOr=python39.PyNumber_InPlaceOr - PyNumber_InPlacePower=python39.PyNumber_InPlacePower - PyNumber_InPlaceRemainder=python39.PyNumber_InPlaceRemainder - PyNumber_InPlaceRshift=python39.PyNumber_InPlaceRshift - PyNumber_InPlaceSubtract=python39.PyNumber_InPlaceSubtract - PyNumber_InPlaceTrueDivide=python39.PyNumber_InPlaceTrueDivide - PyNumber_InPlaceXor=python39.PyNumber_InPlaceXor - PyNumber_Index=python39.PyNumber_Index - PyNumber_Invert=python39.PyNumber_Invert - PyNumber_Long=python39.PyNumber_Long - PyNumber_Lshift=python39.PyNumber_Lshift - PyNumber_MatrixMultiply=python39.PyNumber_MatrixMultiply - PyNumber_Multiply=python39.PyNumber_Multiply - PyNumber_Negative=python39.PyNumber_Negative - PyNumber_Or=python39.PyNumber_Or - 
PyNumber_Positive=python39.PyNumber_Positive - PyNumber_Power=python39.PyNumber_Power - PyNumber_Remainder=python39.PyNumber_Remainder - PyNumber_Rshift=python39.PyNumber_Rshift - PyNumber_Subtract=python39.PyNumber_Subtract - PyNumber_ToBase=python39.PyNumber_ToBase - PyNumber_TrueDivide=python39.PyNumber_TrueDivide - PyNumber_Xor=python39.PyNumber_Xor - PyODictItems_Type=python39.PyODictItems_Type DATA - PyODictIter_Type=python39.PyODictIter_Type DATA - PyODictKeys_Type=python39.PyODictKeys_Type DATA - PyODictValues_Type=python39.PyODictValues_Type DATA - PyODict_DelItem=python39.PyODict_DelItem - PyODict_New=python39.PyODict_New - PyODict_SetItem=python39.PyODict_SetItem - PyODict_Type=python39.PyODict_Type DATA - PyOS_AfterFork=python39.PyOS_AfterFork - PyOS_CheckStack=python39.PyOS_CheckStack - PyOS_FSPath=python39.PyOS_FSPath - PyOS_InitInterrupts=python39.PyOS_InitInterrupts - PyOS_InputHook=python39.PyOS_InputHook DATA - PyOS_InterruptOccurred=python39.PyOS_InterruptOccurred - PyOS_ReadlineFunctionPointer=python39.PyOS_ReadlineFunctionPointer DATA - PyOS_double_to_string=python39.PyOS_double_to_string - PyOS_getsig=python39.PyOS_getsig - PyOS_mystricmp=python39.PyOS_mystricmp - PyOS_mystrnicmp=python39.PyOS_mystrnicmp - PyOS_setsig=python39.PyOS_setsig - PyOS_snprintf=python39.PyOS_snprintf - PyOS_string_to_double=python39.PyOS_string_to_double - PyOS_strtol=python39.PyOS_strtol - PyOS_strtoul=python39.PyOS_strtoul - PyOS_vsnprintf=python39.PyOS_vsnprintf - PyObject_ASCII=python39.PyObject_ASCII - PyObject_AsCharBuffer=python39.PyObject_AsCharBuffer - PyObject_AsFileDescriptor=python39.PyObject_AsFileDescriptor - PyObject_AsReadBuffer=python39.PyObject_AsReadBuffer - PyObject_AsWriteBuffer=python39.PyObject_AsWriteBuffer - PyObject_Bytes=python39.PyObject_Bytes - PyObject_Call=python39.PyObject_Call - PyObject_CallFunction=python39.PyObject_CallFunction - PyObject_CallFunctionObjArgs=python39.PyObject_CallFunctionObjArgs - 
PyObject_CallMethod=python39.PyObject_CallMethod - PyObject_CallMethodObjArgs=python39.PyObject_CallMethodObjArgs - PyObject_CallObject=python39.PyObject_CallObject - PyObject_Calloc=python39.PyObject_Calloc - PyObject_CheckReadBuffer=python39.PyObject_CheckReadBuffer - PyObject_ClearWeakRefs=python39.PyObject_ClearWeakRefs - PyObject_DelItem=python39.PyObject_DelItem - PyObject_DelItemString=python39.PyObject_DelItemString - PyObject_Dir=python39.PyObject_Dir - PyObject_Format=python39.PyObject_Format - PyObject_Free=python39.PyObject_Free - PyObject_GC_Del=python39.PyObject_GC_Del - PyObject_GC_Track=python39.PyObject_GC_Track - PyObject_GC_UnTrack=python39.PyObject_GC_UnTrack - PyObject_GenericGetAttr=python39.PyObject_GenericGetAttr - PyObject_GenericSetAttr=python39.PyObject_GenericSetAttr - PyObject_GenericSetDict=python39.PyObject_GenericSetDict - PyObject_GetAttr=python39.PyObject_GetAttr - PyObject_GetAttrString=python39.PyObject_GetAttrString - PyObject_GetItem=python39.PyObject_GetItem - PyObject_GetIter=python39.PyObject_GetIter - PyObject_HasAttr=python39.PyObject_HasAttr - PyObject_HasAttrString=python39.PyObject_HasAttrString - PyObject_Hash=python39.PyObject_Hash - PyObject_HashNotImplemented=python39.PyObject_HashNotImplemented - PyObject_Init=python39.PyObject_Init - PyObject_InitVar=python39.PyObject_InitVar - PyObject_IsInstance=python39.PyObject_IsInstance - PyObject_IsSubclass=python39.PyObject_IsSubclass - PyObject_IsTrue=python39.PyObject_IsTrue - PyObject_Length=python39.PyObject_Length - PyObject_Malloc=python39.PyObject_Malloc - PyObject_Not=python39.PyObject_Not - PyObject_Realloc=python39.PyObject_Realloc - PyObject_Repr=python39.PyObject_Repr - PyObject_RichCompare=python39.PyObject_RichCompare - PyObject_RichCompareBool=python39.PyObject_RichCompareBool - PyObject_SelfIter=python39.PyObject_SelfIter - PyObject_SetAttr=python39.PyObject_SetAttr - PyObject_SetAttrString=python39.PyObject_SetAttrString - 
PyObject_SetItem=python39.PyObject_SetItem - PyObject_Size=python39.PyObject_Size - PyObject_Str=python39.PyObject_Str - PyObject_Type=python39.PyObject_Type - PyParser_SimpleParseFileFlags=python39.PyParser_SimpleParseFileFlags - PyParser_SimpleParseStringFlags=python39.PyParser_SimpleParseStringFlags - PyParser_SimpleParseStringFlagsFilename=python39.PyParser_SimpleParseStringFlagsFilename - PyProperty_Type=python39.PyProperty_Type DATA - PyRangeIter_Type=python39.PyRangeIter_Type DATA - PyRange_Type=python39.PyRange_Type DATA - PyReversed_Type=python39.PyReversed_Type DATA - PySeqIter_New=python39.PySeqIter_New - PySeqIter_Type=python39.PySeqIter_Type DATA - PySequence_Check=python39.PySequence_Check - PySequence_Concat=python39.PySequence_Concat - PySequence_Contains=python39.PySequence_Contains - PySequence_Count=python39.PySequence_Count - PySequence_DelItem=python39.PySequence_DelItem - PySequence_DelSlice=python39.PySequence_DelSlice - PySequence_Fast=python39.PySequence_Fast - PySequence_GetItem=python39.PySequence_GetItem - PySequence_GetSlice=python39.PySequence_GetSlice - PySequence_In=python39.PySequence_In - PySequence_InPlaceConcat=python39.PySequence_InPlaceConcat - PySequence_InPlaceRepeat=python39.PySequence_InPlaceRepeat - PySequence_Index=python39.PySequence_Index - PySequence_Length=python39.PySequence_Length - PySequence_List=python39.PySequence_List - PySequence_Repeat=python39.PySequence_Repeat - PySequence_SetItem=python39.PySequence_SetItem - PySequence_SetSlice=python39.PySequence_SetSlice - PySequence_Size=python39.PySequence_Size - PySequence_Tuple=python39.PySequence_Tuple - PySetIter_Type=python39.PySetIter_Type DATA - PySet_Add=python39.PySet_Add - PySet_Clear=python39.PySet_Clear - PySet_Contains=python39.PySet_Contains - PySet_Discard=python39.PySet_Discard - PySet_New=python39.PySet_New - PySet_Pop=python39.PySet_Pop - PySet_Size=python39.PySet_Size - PySet_Type=python39.PySet_Type DATA - 
PySlice_AdjustIndices=python39.PySlice_AdjustIndices - PySlice_GetIndices=python39.PySlice_GetIndices - PySlice_GetIndicesEx=python39.PySlice_GetIndicesEx - PySlice_New=python39.PySlice_New - PySlice_Type=python39.PySlice_Type DATA - PySlice_Unpack=python39.PySlice_Unpack - PySortWrapper_Type=python39.PySortWrapper_Type DATA - PyInterpreterState_GetID=python39.PyInterpreterState_GetID - PyState_AddModule=python39.PyState_AddModule - PyState_FindModule=python39.PyState_FindModule - PyState_RemoveModule=python39.PyState_RemoveModule - PyStructSequence_GetItem=python39.PyStructSequence_GetItem - PyStructSequence_New=python39.PyStructSequence_New - PyStructSequence_NewType=python39.PyStructSequence_NewType - PyStructSequence_SetItem=python39.PyStructSequence_SetItem - PySuper_Type=python39.PySuper_Type DATA - PySys_AddWarnOption=python39.PySys_AddWarnOption - PySys_AddWarnOptionUnicode=python39.PySys_AddWarnOptionUnicode - PySys_AddXOption=python39.PySys_AddXOption - PySys_FormatStderr=python39.PySys_FormatStderr - PySys_FormatStdout=python39.PySys_FormatStdout - PySys_GetObject=python39.PySys_GetObject - PySys_GetXOptions=python39.PySys_GetXOptions - PySys_HasWarnOptions=python39.PySys_HasWarnOptions - PySys_ResetWarnOptions=python39.PySys_ResetWarnOptions - PySys_SetArgv=python39.PySys_SetArgv - PySys_SetArgvEx=python39.PySys_SetArgvEx - PySys_SetObject=python39.PySys_SetObject - PySys_SetPath=python39.PySys_SetPath - PySys_WriteStderr=python39.PySys_WriteStderr - PySys_WriteStdout=python39.PySys_WriteStdout - PyThreadState_Clear=python39.PyThreadState_Clear - PyThreadState_Delete=python39.PyThreadState_Delete - PyThreadState_DeleteCurrent=python39.PyThreadState_DeleteCurrent - PyThreadState_Get=python39.PyThreadState_Get - PyThreadState_GetDict=python39.PyThreadState_GetDict - PyThreadState_New=python39.PyThreadState_New - PyThreadState_SetAsyncExc=python39.PyThreadState_SetAsyncExc - PyThreadState_Swap=python39.PyThreadState_Swap - 
PyThread_tss_alloc=python39.PyThread_tss_alloc - PyThread_tss_create=python39.PyThread_tss_create - PyThread_tss_delete=python39.PyThread_tss_delete - PyThread_tss_free=python39.PyThread_tss_free - PyThread_tss_get=python39.PyThread_tss_get - PyThread_tss_is_created=python39.PyThread_tss_is_created - PyThread_tss_set=python39.PyThread_tss_set - PyTraceBack_Here=python39.PyTraceBack_Here - PyTraceBack_Print=python39.PyTraceBack_Print - PyTraceBack_Type=python39.PyTraceBack_Type DATA - PyTupleIter_Type=python39.PyTupleIter_Type DATA - PyTuple_GetItem=python39.PyTuple_GetItem - PyTuple_GetSlice=python39.PyTuple_GetSlice - PyTuple_New=python39.PyTuple_New - PyTuple_Pack=python39.PyTuple_Pack - PyTuple_SetItem=python39.PyTuple_SetItem - PyTuple_Size=python39.PyTuple_Size - PyTuple_Type=python39.PyTuple_Type DATA - PyType_ClearCache=python39.PyType_ClearCache - PyType_FromSpec=python39.PyType_FromSpec - PyType_FromSpecWithBases=python39.PyType_FromSpecWithBases - PyType_GenericAlloc=python39.PyType_GenericAlloc - PyType_GenericNew=python39.PyType_GenericNew - PyType_GetFlags=python39.PyType_GetFlags - PyType_GetSlot=python39.PyType_GetSlot - PyType_IsSubtype=python39.PyType_IsSubtype - PyType_Modified=python39.PyType_Modified - PyType_Ready=python39.PyType_Ready - PyType_Type=python39.PyType_Type DATA - PyUnicodeDecodeError_Create=python39.PyUnicodeDecodeError_Create - PyUnicodeDecodeError_GetEncoding=python39.PyUnicodeDecodeError_GetEncoding - PyUnicodeDecodeError_GetEnd=python39.PyUnicodeDecodeError_GetEnd - PyUnicodeDecodeError_GetObject=python39.PyUnicodeDecodeError_GetObject - PyUnicodeDecodeError_GetReason=python39.PyUnicodeDecodeError_GetReason - PyUnicodeDecodeError_GetStart=python39.PyUnicodeDecodeError_GetStart - PyUnicodeDecodeError_SetEnd=python39.PyUnicodeDecodeError_SetEnd - PyUnicodeDecodeError_SetReason=python39.PyUnicodeDecodeError_SetReason - PyUnicodeDecodeError_SetStart=python39.PyUnicodeDecodeError_SetStart - 
PyUnicodeEncodeError_GetEncoding=python39.PyUnicodeEncodeError_GetEncoding - PyUnicodeEncodeError_GetEnd=python39.PyUnicodeEncodeError_GetEnd - PyUnicodeEncodeError_GetObject=python39.PyUnicodeEncodeError_GetObject - PyUnicodeEncodeError_GetReason=python39.PyUnicodeEncodeError_GetReason - PyUnicodeEncodeError_GetStart=python39.PyUnicodeEncodeError_GetStart - PyUnicodeEncodeError_SetEnd=python39.PyUnicodeEncodeError_SetEnd - PyUnicodeEncodeError_SetReason=python39.PyUnicodeEncodeError_SetReason - PyUnicodeEncodeError_SetStart=python39.PyUnicodeEncodeError_SetStart - PyUnicodeIter_Type=python39.PyUnicodeIter_Type DATA - PyUnicodeTranslateError_GetEnd=python39.PyUnicodeTranslateError_GetEnd - PyUnicodeTranslateError_GetObject=python39.PyUnicodeTranslateError_GetObject - PyUnicodeTranslateError_GetReason=python39.PyUnicodeTranslateError_GetReason - PyUnicodeTranslateError_GetStart=python39.PyUnicodeTranslateError_GetStart - PyUnicodeTranslateError_SetEnd=python39.PyUnicodeTranslateError_SetEnd - PyUnicodeTranslateError_SetReason=python39.PyUnicodeTranslateError_SetReason - PyUnicodeTranslateError_SetStart=python39.PyUnicodeTranslateError_SetStart - PyUnicode_Append=python39.PyUnicode_Append - PyUnicode_AppendAndDel=python39.PyUnicode_AppendAndDel - PyUnicode_AsASCIIString=python39.PyUnicode_AsASCIIString - PyUnicode_AsCharmapString=python39.PyUnicode_AsCharmapString - PyUnicode_AsDecodedObject=python39.PyUnicode_AsDecodedObject - PyUnicode_AsDecodedUnicode=python39.PyUnicode_AsDecodedUnicode - PyUnicode_AsEncodedObject=python39.PyUnicode_AsEncodedObject - PyUnicode_AsEncodedString=python39.PyUnicode_AsEncodedString - PyUnicode_AsEncodedUnicode=python39.PyUnicode_AsEncodedUnicode - PyUnicode_AsLatin1String=python39.PyUnicode_AsLatin1String - PyUnicode_AsMBCSString=python39.PyUnicode_AsMBCSString - PyUnicode_AsRawUnicodeEscapeString=python39.PyUnicode_AsRawUnicodeEscapeString - PyUnicode_AsUCS4=python39.PyUnicode_AsUCS4 - 
PyUnicode_AsUCS4Copy=python39.PyUnicode_AsUCS4Copy - PyUnicode_AsUTF16String=python39.PyUnicode_AsUTF16String - PyUnicode_AsUTF32String=python39.PyUnicode_AsUTF32String - PyUnicode_AsUTF8String=python39.PyUnicode_AsUTF8String - PyUnicode_AsUnicodeEscapeString=python39.PyUnicode_AsUnicodeEscapeString - PyUnicode_AsWideChar=python39.PyUnicode_AsWideChar - PyUnicode_AsWideCharString=python39.PyUnicode_AsWideCharString - PyUnicode_BuildEncodingMap=python39.PyUnicode_BuildEncodingMap - PyUnicode_Compare=python39.PyUnicode_Compare - PyUnicode_CompareWithASCIIString=python39.PyUnicode_CompareWithASCIIString - PyUnicode_Concat=python39.PyUnicode_Concat - PyUnicode_Contains=python39.PyUnicode_Contains - PyUnicode_Count=python39.PyUnicode_Count - PyUnicode_Decode=python39.PyUnicode_Decode - PyUnicode_DecodeASCII=python39.PyUnicode_DecodeASCII - PyUnicode_DecodeCharmap=python39.PyUnicode_DecodeCharmap - PyUnicode_DecodeCodePageStateful=python39.PyUnicode_DecodeCodePageStateful - PyUnicode_DecodeFSDefault=python39.PyUnicode_DecodeFSDefault - PyUnicode_DecodeFSDefaultAndSize=python39.PyUnicode_DecodeFSDefaultAndSize - PyUnicode_DecodeLatin1=python39.PyUnicode_DecodeLatin1 - PyUnicode_DecodeLocale=python39.PyUnicode_DecodeLocale - PyUnicode_DecodeLocaleAndSize=python39.PyUnicode_DecodeLocaleAndSize - PyUnicode_DecodeMBCS=python39.PyUnicode_DecodeMBCS - PyUnicode_DecodeMBCSStateful=python39.PyUnicode_DecodeMBCSStateful - PyUnicode_DecodeRawUnicodeEscape=python39.PyUnicode_DecodeRawUnicodeEscape - PyUnicode_DecodeUTF16=python39.PyUnicode_DecodeUTF16 - PyUnicode_DecodeUTF16Stateful=python39.PyUnicode_DecodeUTF16Stateful - PyUnicode_DecodeUTF32=python39.PyUnicode_DecodeUTF32 - PyUnicode_DecodeUTF32Stateful=python39.PyUnicode_DecodeUTF32Stateful - PyUnicode_DecodeUTF7=python39.PyUnicode_DecodeUTF7 - PyUnicode_DecodeUTF7Stateful=python39.PyUnicode_DecodeUTF7Stateful - PyUnicode_DecodeUTF8=python39.PyUnicode_DecodeUTF8 - 
PyUnicode_DecodeUTF8Stateful=python39.PyUnicode_DecodeUTF8Stateful - PyUnicode_DecodeUnicodeEscape=python39.PyUnicode_DecodeUnicodeEscape - PyUnicode_EncodeCodePage=python39.PyUnicode_EncodeCodePage - PyUnicode_EncodeFSDefault=python39.PyUnicode_EncodeFSDefault - PyUnicode_EncodeLocale=python39.PyUnicode_EncodeLocale - PyUnicode_FSConverter=python39.PyUnicode_FSConverter - PyUnicode_FSDecoder=python39.PyUnicode_FSDecoder - PyUnicode_Find=python39.PyUnicode_Find - PyUnicode_FindChar=python39.PyUnicode_FindChar - PyUnicode_Format=python39.PyUnicode_Format - PyUnicode_FromEncodedObject=python39.PyUnicode_FromEncodedObject - PyUnicode_FromFormat=python39.PyUnicode_FromFormat - PyUnicode_FromFormatV=python39.PyUnicode_FromFormatV - PyUnicode_FromObject=python39.PyUnicode_FromObject - PyUnicode_FromOrdinal=python39.PyUnicode_FromOrdinal - PyUnicode_FromString=python39.PyUnicode_FromString - PyUnicode_FromStringAndSize=python39.PyUnicode_FromStringAndSize - PyUnicode_FromWideChar=python39.PyUnicode_FromWideChar - PyUnicode_GetDefaultEncoding=python39.PyUnicode_GetDefaultEncoding - PyUnicode_GetLength=python39.PyUnicode_GetLength - PyUnicode_GetSize=python39.PyUnicode_GetSize - PyUnicode_InternFromString=python39.PyUnicode_InternFromString - PyUnicode_InternImmortal=python39.PyUnicode_InternImmortal - PyUnicode_InternInPlace=python39.PyUnicode_InternInPlace - PyUnicode_IsIdentifier=python39.PyUnicode_IsIdentifier - PyUnicode_Join=python39.PyUnicode_Join - PyUnicode_Partition=python39.PyUnicode_Partition - PyUnicode_RPartition=python39.PyUnicode_RPartition - PyUnicode_RSplit=python39.PyUnicode_RSplit - PyUnicode_ReadChar=python39.PyUnicode_ReadChar - PyUnicode_Replace=python39.PyUnicode_Replace - PyUnicode_Resize=python39.PyUnicode_Resize - PyUnicode_RichCompare=python39.PyUnicode_RichCompare - PyUnicode_Split=python39.PyUnicode_Split - PyUnicode_Splitlines=python39.PyUnicode_Splitlines - PyUnicode_Substring=python39.PyUnicode_Substring - 
PyUnicode_Tailmatch=python39.PyUnicode_Tailmatch - PyUnicode_Translate=python39.PyUnicode_Translate - PyUnicode_Type=python39.PyUnicode_Type DATA - PyUnicode_WriteChar=python39.PyUnicode_WriteChar - PyWeakref_GetObject=python39.PyWeakref_GetObject - PyWeakref_NewProxy=python39.PyWeakref_NewProxy - PyWeakref_NewRef=python39.PyWeakref_NewRef - PyWrapperDescr_Type=python39.PyWrapperDescr_Type DATA - PyWrapper_New=python39.PyWrapper_New - PyZip_Type=python39.PyZip_Type DATA - Py_AddPendingCall=python39.Py_AddPendingCall - Py_AtExit=python39.Py_AtExit - Py_BuildValue=python39.Py_BuildValue - Py_CompileString=python39.Py_CompileString - Py_DecRef=python39.Py_DecRef - Py_DecodeLocale=python39.Py_DecodeLocale - Py_EncodeLocale=python39.Py_EncodeLocale - Py_EndInterpreter=python39.Py_EndInterpreter - Py_EnterRecursiveCall=python39.Py_EnterRecursiveCall - Py_Exit=python39.Py_Exit - Py_FatalError=python39.Py_FatalError - Py_FileSystemDefaultEncodeErrors=python39.Py_FileSystemDefaultEncodeErrors DATA - Py_FileSystemDefaultEncoding=python39.Py_FileSystemDefaultEncoding DATA - Py_Finalize=python39.Py_Finalize - Py_FinalizeEx=python39.Py_FinalizeEx - Py_GenericAlias=python39.Py_GenericAlias - Py_GenericAliasType=python39.Py_GenericAliasType - Py_GetBuildInfo=python39.Py_GetBuildInfo - Py_GetCompiler=python39.Py_GetCompiler - Py_GetCopyright=python39.Py_GetCopyright - Py_GetExecPrefix=python39.Py_GetExecPrefix - Py_GetPath=python39.Py_GetPath - Py_GetPlatform=python39.Py_GetPlatform - Py_GetPrefix=python39.Py_GetPrefix - Py_GetProgramFullPath=python39.Py_GetProgramFullPath - Py_GetProgramName=python39.Py_GetProgramName - Py_GetPythonHome=python39.Py_GetPythonHome - Py_GetRecursionLimit=python39.Py_GetRecursionLimit - Py_GetVersion=python39.Py_GetVersion - Py_HasFileSystemDefaultEncoding=python39.Py_HasFileSystemDefaultEncoding DATA - Py_IncRef=python39.Py_IncRef - Py_Initialize=python39.Py_Initialize - Py_InitializeEx=python39.Py_InitializeEx - 
Py_IsInitialized=python39.Py_IsInitialized - Py_LeaveRecursiveCall=python39.Py_LeaveRecursiveCall - Py_Main=python39.Py_Main - Py_MakePendingCalls=python39.Py_MakePendingCalls - Py_NewInterpreter=python39.Py_NewInterpreter - Py_ReprEnter=python39.Py_ReprEnter - Py_ReprLeave=python39.Py_ReprLeave - Py_SetPath=python39.Py_SetPath - Py_SetProgramName=python39.Py_SetProgramName - Py_SetPythonHome=python39.Py_SetPythonHome - Py_SetRecursionLimit=python39.Py_SetRecursionLimit - Py_SymtableString=python39.Py_SymtableString - Py_UTF8Mode=python39.Py_UTF8Mode DATA - Py_VaBuildValue=python39.Py_VaBuildValue - _PyArg_ParseTupleAndKeywords_SizeT=python39._PyArg_ParseTupleAndKeywords_SizeT - _PyArg_ParseTuple_SizeT=python39._PyArg_ParseTuple_SizeT - _PyArg_Parse_SizeT=python39._PyArg_Parse_SizeT - _PyArg_VaParseTupleAndKeywords_SizeT=python39._PyArg_VaParseTupleAndKeywords_SizeT - _PyArg_VaParse_SizeT=python39._PyArg_VaParse_SizeT - _PyErr_BadInternalCall=python39._PyErr_BadInternalCall - _PyObject_CallFunction_SizeT=python39._PyObject_CallFunction_SizeT - _PyObject_CallMethod_SizeT=python39._PyObject_CallMethod_SizeT - _PyObject_GC_Malloc=python39._PyObject_GC_Malloc - _PyObject_GC_New=python39._PyObject_GC_New - _PyObject_GC_NewVar=python39._PyObject_GC_NewVar - _PyObject_GC_Resize=python39._PyObject_GC_Resize - _PyObject_New=python39._PyObject_New - _PyObject_NewVar=python39._PyObject_NewVar - _PyState_AddModule=python39._PyState_AddModule - _PyThreadState_Init=python39._PyThreadState_Init - _PyThreadState_Prealloc=python39._PyThreadState_Prealloc - _PyTrash_delete_later=python39._PyTrash_delete_later DATA - _PyTrash_delete_nesting=python39._PyTrash_delete_nesting DATA - _PyTrash_deposit_object=python39._PyTrash_deposit_object - _PyTrash_destroy_chain=python39._PyTrash_destroy_chain - _PyTrash_thread_deposit_object=python39._PyTrash_thread_deposit_object - _PyTrash_thread_destroy_chain=python39._PyTrash_thread_destroy_chain - 
_PyWeakref_CallableProxyType=python39._PyWeakref_CallableProxyType DATA - _PyWeakref_ProxyType=python39._PyWeakref_ProxyType DATA - _PyWeakref_RefType=python39._PyWeakref_RefType DATA - _Py_BuildValue_SizeT=python39._Py_BuildValue_SizeT - _Py_CheckRecursionLimit=python39._Py_CheckRecursionLimit DATA - _Py_CheckRecursiveCall=python39._Py_CheckRecursiveCall - _Py_Dealloc=python39._Py_Dealloc - _Py_EllipsisObject=python39._Py_EllipsisObject DATA - _Py_FalseStruct=python39._Py_FalseStruct DATA - _Py_NoneStruct=python39._Py_NoneStruct DATA - _Py_NotImplementedStruct=python39._Py_NotImplementedStruct DATA - _Py_SwappedOp=python39._Py_SwappedOp DATA - _Py_TrueStruct=python39._Py_TrueStruct DATA - _Py_VaBuildValue_SizeT=python39._Py_VaBuildValue_SizeT + PyArg_Parse=python310.PyArg_Parse + PyArg_ParseTuple=python310.PyArg_ParseTuple + PyArg_ParseTupleAndKeywords=python310.PyArg_ParseTupleAndKeywords + PyArg_UnpackTuple=python310.PyArg_UnpackTuple + PyArg_VaParse=python310.PyArg_VaParse + PyArg_VaParseTupleAndKeywords=python310.PyArg_VaParseTupleAndKeywords + PyArg_ValidateKeywordArguments=python310.PyArg_ValidateKeywordArguments + PyBaseObject_Type=python310.PyBaseObject_Type DATA + PyBool_FromLong=python310.PyBool_FromLong + PyBool_Type=python310.PyBool_Type DATA + PyByteArrayIter_Type=python310.PyByteArrayIter_Type DATA + PyByteArray_AsString=python310.PyByteArray_AsString + PyByteArray_Concat=python310.PyByteArray_Concat + PyByteArray_FromObject=python310.PyByteArray_FromObject + PyByteArray_FromStringAndSize=python310.PyByteArray_FromStringAndSize + PyByteArray_Resize=python310.PyByteArray_Resize + PyByteArray_Size=python310.PyByteArray_Size + PyByteArray_Type=python310.PyByteArray_Type DATA + PyBytesIter_Type=python310.PyBytesIter_Type DATA + PyBytes_AsString=python310.PyBytes_AsString + PyBytes_AsStringAndSize=python310.PyBytes_AsStringAndSize + PyBytes_Concat=python310.PyBytes_Concat + PyBytes_ConcatAndDel=python310.PyBytes_ConcatAndDel + 
PyBytes_DecodeEscape=python310.PyBytes_DecodeEscape + PyBytes_FromFormat=python310.PyBytes_FromFormat + PyBytes_FromFormatV=python310.PyBytes_FromFormatV + PyBytes_FromObject=python310.PyBytes_FromObject + PyBytes_FromString=python310.PyBytes_FromString + PyBytes_FromStringAndSize=python310.PyBytes_FromStringAndSize + PyBytes_Repr=python310.PyBytes_Repr + PyBytes_Size=python310.PyBytes_Size + PyBytes_Type=python310.PyBytes_Type DATA + PyCFunction_Call=python310.PyCFunction_Call + PyCFunction_GetFlags=python310.PyCFunction_GetFlags + PyCFunction_GetFunction=python310.PyCFunction_GetFunction + PyCFunction_GetSelf=python310.PyCFunction_GetSelf + PyCFunction_New=python310.PyCFunction_New + PyCFunction_NewEx=python310.PyCFunction_NewEx + PyCFunction_Type=python310.PyCFunction_Type DATA + PyCallIter_New=python310.PyCallIter_New + PyCallIter_Type=python310.PyCallIter_Type DATA + PyCallable_Check=python310.PyCallable_Check + PyCapsule_GetContext=python310.PyCapsule_GetContext + PyCapsule_GetDestructor=python310.PyCapsule_GetDestructor + PyCapsule_GetName=python310.PyCapsule_GetName + PyCapsule_GetPointer=python310.PyCapsule_GetPointer + PyCapsule_Import=python310.PyCapsule_Import + PyCapsule_IsValid=python310.PyCapsule_IsValid + PyCapsule_New=python310.PyCapsule_New + PyCapsule_SetContext=python310.PyCapsule_SetContext + PyCapsule_SetDestructor=python310.PyCapsule_SetDestructor + PyCapsule_SetName=python310.PyCapsule_SetName + PyCapsule_SetPointer=python310.PyCapsule_SetPointer + PyCapsule_Type=python310.PyCapsule_Type DATA + PyClassMethodDescr_Type=python310.PyClassMethodDescr_Type DATA + PyCodec_BackslashReplaceErrors=python310.PyCodec_BackslashReplaceErrors + PyCodec_Decode=python310.PyCodec_Decode + PyCodec_Decoder=python310.PyCodec_Decoder + PyCodec_Encode=python310.PyCodec_Encode + PyCodec_Encoder=python310.PyCodec_Encoder + PyCodec_IgnoreErrors=python310.PyCodec_IgnoreErrors + PyCodec_IncrementalDecoder=python310.PyCodec_IncrementalDecoder + 
PyCodec_IncrementalEncoder=python310.PyCodec_IncrementalEncoder + PyCodec_KnownEncoding=python310.PyCodec_KnownEncoding + PyCodec_LookupError=python310.PyCodec_LookupError + PyCodec_NameReplaceErrors=python310.PyCodec_NameReplaceErrors + PyCodec_Register=python310.PyCodec_Register + PyCodec_RegisterError=python310.PyCodec_RegisterError + PyCodec_ReplaceErrors=python310.PyCodec_ReplaceErrors + PyCodec_StreamReader=python310.PyCodec_StreamReader + PyCodec_StreamWriter=python310.PyCodec_StreamWriter + PyCodec_StrictErrors=python310.PyCodec_StrictErrors + PyCodec_XMLCharRefReplaceErrors=python310.PyCodec_XMLCharRefReplaceErrors + PyComplex_FromDoubles=python310.PyComplex_FromDoubles + PyComplex_ImagAsDouble=python310.PyComplex_ImagAsDouble + PyComplex_RealAsDouble=python310.PyComplex_RealAsDouble + PyComplex_Type=python310.PyComplex_Type DATA + PyDescr_NewClassMethod=python310.PyDescr_NewClassMethod + PyDescr_NewGetSet=python310.PyDescr_NewGetSet + PyDescr_NewMember=python310.PyDescr_NewMember + PyDescr_NewMethod=python310.PyDescr_NewMethod + PyDictItems_Type=python310.PyDictItems_Type DATA + PyDictIterItem_Type=python310.PyDictIterItem_Type DATA + PyDictIterKey_Type=python310.PyDictIterKey_Type DATA + PyDictIterValue_Type=python310.PyDictIterValue_Type DATA + PyDictKeys_Type=python310.PyDictKeys_Type DATA + PyDictProxy_New=python310.PyDictProxy_New + PyDictProxy_Type=python310.PyDictProxy_Type DATA + PyDictValues_Type=python310.PyDictValues_Type DATA + PyDict_Clear=python310.PyDict_Clear + PyDict_Contains=python310.PyDict_Contains + PyDict_Copy=python310.PyDict_Copy + PyDict_DelItem=python310.PyDict_DelItem + PyDict_DelItemString=python310.PyDict_DelItemString + PyDict_GetItem=python310.PyDict_GetItem + PyDict_GetItemString=python310.PyDict_GetItemString + PyDict_GetItemWithError=python310.PyDict_GetItemWithError + PyDict_Items=python310.PyDict_Items + PyDict_Keys=python310.PyDict_Keys + PyDict_Merge=python310.PyDict_Merge + 
PyDict_MergeFromSeq2=python310.PyDict_MergeFromSeq2 + PyDict_New=python310.PyDict_New + PyDict_Next=python310.PyDict_Next + PyDict_SetItem=python310.PyDict_SetItem + PyDict_SetItemString=python310.PyDict_SetItemString + PyDict_Size=python310.PyDict_Size + PyDict_Type=python310.PyDict_Type DATA + PyDict_Update=python310.PyDict_Update + PyDict_Values=python310.PyDict_Values + PyEllipsis_Type=python310.PyEllipsis_Type DATA + PyEnum_Type=python310.PyEnum_Type DATA + PyErr_BadArgument=python310.PyErr_BadArgument + PyErr_BadInternalCall=python310.PyErr_BadInternalCall + PyErr_CheckSignals=python310.PyErr_CheckSignals + PyErr_Clear=python310.PyErr_Clear + PyErr_Display=python310.PyErr_Display + PyErr_ExceptionMatches=python310.PyErr_ExceptionMatches + PyErr_Fetch=python310.PyErr_Fetch + PyErr_Format=python310.PyErr_Format + PyErr_FormatV=python310.PyErr_FormatV + PyErr_GetExcInfo=python310.PyErr_GetExcInfo + PyErr_GivenExceptionMatches=python310.PyErr_GivenExceptionMatches + PyErr_NewException=python310.PyErr_NewException + PyErr_NewExceptionWithDoc=python310.PyErr_NewExceptionWithDoc + PyErr_NoMemory=python310.PyErr_NoMemory + PyErr_NormalizeException=python310.PyErr_NormalizeException + PyErr_Occurred=python310.PyErr_Occurred + PyErr_Print=python310.PyErr_Print + PyErr_PrintEx=python310.PyErr_PrintEx + PyErr_ProgramText=python310.PyErr_ProgramText + PyErr_ResourceWarning=python310.PyErr_ResourceWarning + PyErr_Restore=python310.PyErr_Restore + PyErr_SetExcFromWindowsErr=python310.PyErr_SetExcFromWindowsErr + PyErr_SetExcFromWindowsErrWithFilename=python310.PyErr_SetExcFromWindowsErrWithFilename + PyErr_SetExcFromWindowsErrWithFilenameObject=python310.PyErr_SetExcFromWindowsErrWithFilenameObject + PyErr_SetExcFromWindowsErrWithFilenameObjects=python310.PyErr_SetExcFromWindowsErrWithFilenameObjects + PyErr_SetExcInfo=python310.PyErr_SetExcInfo + PyErr_SetFromErrno=python310.PyErr_SetFromErrno + PyErr_SetFromErrnoWithFilename=python310.PyErr_SetFromErrnoWithFilename + 
PyErr_SetFromErrnoWithFilenameObject=python310.PyErr_SetFromErrnoWithFilenameObject + PyErr_SetFromErrnoWithFilenameObjects=python310.PyErr_SetFromErrnoWithFilenameObjects + PyErr_SetFromWindowsErr=python310.PyErr_SetFromWindowsErr + PyErr_SetFromWindowsErrWithFilename=python310.PyErr_SetFromWindowsErrWithFilename + PyErr_SetImportError=python310.PyErr_SetImportError + PyErr_SetImportErrorSubclass=python310.PyErr_SetImportErrorSubclass + PyErr_SetInterrupt=python310.PyErr_SetInterrupt + PyErr_SetNone=python310.PyErr_SetNone + PyErr_SetObject=python310.PyErr_SetObject + PyErr_SetString=python310.PyErr_SetString + PyErr_SyntaxLocation=python310.PyErr_SyntaxLocation + PyErr_SyntaxLocationEx=python310.PyErr_SyntaxLocationEx + PyErr_WarnEx=python310.PyErr_WarnEx + PyErr_WarnExplicit=python310.PyErr_WarnExplicit + PyErr_WarnFormat=python310.PyErr_WarnFormat + PyErr_WriteUnraisable=python310.PyErr_WriteUnraisable + PyEval_AcquireLock=python310.PyEval_AcquireLock + PyEval_AcquireThread=python310.PyEval_AcquireThread + PyEval_CallFunction=python310.PyEval_CallFunction + PyEval_CallMethod=python310.PyEval_CallMethod + PyEval_CallObjectWithKeywords=python310.PyEval_CallObjectWithKeywords + PyEval_EvalCode=python310.PyEval_EvalCode + PyEval_EvalCodeEx=python310.PyEval_EvalCodeEx + PyEval_EvalFrame=python310.PyEval_EvalFrame + PyEval_EvalFrameEx=python310.PyEval_EvalFrameEx + PyEval_GetBuiltins=python310.PyEval_GetBuiltins + PyEval_GetCallStats=python310.PyEval_GetCallStats + PyEval_GetFrame=python310.PyEval_GetFrame + PyEval_GetFuncDesc=python310.PyEval_GetFuncDesc + PyEval_GetFuncName=python310.PyEval_GetFuncName + PyEval_GetGlobals=python310.PyEval_GetGlobals + PyEval_GetLocals=python310.PyEval_GetLocals + PyEval_InitThreads=python310.PyEval_InitThreads + PyEval_ReInitThreads=python310.PyEval_ReInitThreads + PyEval_ReleaseLock=python310.PyEval_ReleaseLock + PyEval_ReleaseThread=python310.PyEval_ReleaseThread + PyEval_RestoreThread=python310.PyEval_RestoreThread + 
PyEval_SaveThread=python310.PyEval_SaveThread + PyEval_ThreadsInitialized=python310.PyEval_ThreadsInitialized + PyExc_ArithmeticError=python310.PyExc_ArithmeticError DATA + PyExc_AssertionError=python310.PyExc_AssertionError DATA + PyExc_AttributeError=python310.PyExc_AttributeError DATA + PyExc_BaseException=python310.PyExc_BaseException DATA + PyExc_BlockingIOError=python310.PyExc_BlockingIOError DATA + PyExc_BrokenPipeError=python310.PyExc_BrokenPipeError DATA + PyExc_BufferError=python310.PyExc_BufferError DATA + PyExc_BytesWarning=python310.PyExc_BytesWarning DATA + PyExc_ChildProcessError=python310.PyExc_ChildProcessError DATA + PyExc_ConnectionAbortedError=python310.PyExc_ConnectionAbortedError DATA + PyExc_ConnectionError=python310.PyExc_ConnectionError DATA + PyExc_ConnectionRefusedError=python310.PyExc_ConnectionRefusedError DATA + PyExc_ConnectionResetError=python310.PyExc_ConnectionResetError DATA + PyExc_DeprecationWarning=python310.PyExc_DeprecationWarning DATA + PyExc_EOFError=python310.PyExc_EOFError DATA + PyExc_EnvironmentError=python310.PyExc_EnvironmentError DATA + PyExc_Exception=python310.PyExc_Exception DATA + PyExc_FileExistsError=python310.PyExc_FileExistsError DATA + PyExc_FileNotFoundError=python310.PyExc_FileNotFoundError DATA + PyExc_FloatingPointError=python310.PyExc_FloatingPointError DATA + PyExc_FutureWarning=python310.PyExc_FutureWarning DATA + PyExc_GeneratorExit=python310.PyExc_GeneratorExit DATA + PyExc_IOError=python310.PyExc_IOError DATA + PyExc_ImportError=python310.PyExc_ImportError DATA + PyExc_ImportWarning=python310.PyExc_ImportWarning DATA + PyExc_IndentationError=python310.PyExc_IndentationError DATA + PyExc_IndexError=python310.PyExc_IndexError DATA + PyExc_InterruptedError=python310.PyExc_InterruptedError DATA + PyExc_IsADirectoryError=python310.PyExc_IsADirectoryError DATA + PyExc_KeyError=python310.PyExc_KeyError DATA + PyExc_KeyboardInterrupt=python310.PyExc_KeyboardInterrupt DATA + 
PyExc_LookupError=python310.PyExc_LookupError DATA + PyExc_MemoryError=python310.PyExc_MemoryError DATA + PyExc_ModuleNotFoundError=python310.PyExc_ModuleNotFoundError DATA + PyExc_NameError=python310.PyExc_NameError DATA + PyExc_NotADirectoryError=python310.PyExc_NotADirectoryError DATA + PyExc_NotImplementedError=python310.PyExc_NotImplementedError DATA + PyExc_OSError=python310.PyExc_OSError DATA + PyExc_OverflowError=python310.PyExc_OverflowError DATA + PyExc_PendingDeprecationWarning=python310.PyExc_PendingDeprecationWarning DATA + PyExc_PermissionError=python310.PyExc_PermissionError DATA + PyExc_ProcessLookupError=python310.PyExc_ProcessLookupError DATA + PyExc_RecursionError=python310.PyExc_RecursionError DATA + PyExc_ReferenceError=python310.PyExc_ReferenceError DATA + PyExc_ResourceWarning=python310.PyExc_ResourceWarning DATA + PyExc_RuntimeError=python310.PyExc_RuntimeError DATA + PyExc_RuntimeWarning=python310.PyExc_RuntimeWarning DATA + PyExc_StopAsyncIteration=python310.PyExc_StopAsyncIteration DATA + PyExc_StopIteration=python310.PyExc_StopIteration DATA + PyExc_SyntaxError=python310.PyExc_SyntaxError DATA + PyExc_SyntaxWarning=python310.PyExc_SyntaxWarning DATA + PyExc_SystemError=python310.PyExc_SystemError DATA + PyExc_SystemExit=python310.PyExc_SystemExit DATA + PyExc_TabError=python310.PyExc_TabError DATA + PyExc_TimeoutError=python310.PyExc_TimeoutError DATA + PyExc_TypeError=python310.PyExc_TypeError DATA + PyExc_UnboundLocalError=python310.PyExc_UnboundLocalError DATA + PyExc_UnicodeDecodeError=python310.PyExc_UnicodeDecodeError DATA + PyExc_UnicodeEncodeError=python310.PyExc_UnicodeEncodeError DATA + PyExc_UnicodeError=python310.PyExc_UnicodeError DATA + PyExc_UnicodeTranslateError=python310.PyExc_UnicodeTranslateError DATA + PyExc_UnicodeWarning=python310.PyExc_UnicodeWarning DATA + PyExc_UserWarning=python310.PyExc_UserWarning DATA + PyExc_ValueError=python310.PyExc_ValueError DATA + PyExc_Warning=python310.PyExc_Warning DATA + 
PyExc_WindowsError=python310.PyExc_WindowsError DATA + PyExc_ZeroDivisionError=python310.PyExc_ZeroDivisionError DATA + PyExceptionClass_Name=python310.PyExceptionClass_Name + PyException_GetCause=python310.PyException_GetCause + PyException_GetContext=python310.PyException_GetContext + PyException_GetTraceback=python310.PyException_GetTraceback + PyException_SetCause=python310.PyException_SetCause + PyException_SetContext=python310.PyException_SetContext + PyException_SetTraceback=python310.PyException_SetTraceback + PyFile_FromFd=python310.PyFile_FromFd + PyFile_GetLine=python310.PyFile_GetLine + PyFile_WriteObject=python310.PyFile_WriteObject + PyFile_WriteString=python310.PyFile_WriteString + PyFilter_Type=python310.PyFilter_Type DATA + PyFloat_AsDouble=python310.PyFloat_AsDouble + PyFloat_FromDouble=python310.PyFloat_FromDouble + PyFloat_FromString=python310.PyFloat_FromString + PyFloat_GetInfo=python310.PyFloat_GetInfo + PyFloat_GetMax=python310.PyFloat_GetMax + PyFloat_GetMin=python310.PyFloat_GetMin + PyFloat_Type=python310.PyFloat_Type DATA + PyFrozenSet_New=python310.PyFrozenSet_New + PyFrozenSet_Type=python310.PyFrozenSet_Type DATA + PyGC_Collect=python310.PyGC_Collect + PyGILState_Ensure=python310.PyGILState_Ensure + PyGILState_GetThisThreadState=python310.PyGILState_GetThisThreadState + PyGILState_Release=python310.PyGILState_Release + PyGetSetDescr_Type=python310.PyGetSetDescr_Type DATA + PyImport_AddModule=python310.PyImport_AddModule + PyImport_AddModuleObject=python310.PyImport_AddModuleObject + PyImport_AppendInittab=python310.PyImport_AppendInittab + PyImport_Cleanup=python310.PyImport_Cleanup + PyImport_ExecCodeModule=python310.PyImport_ExecCodeModule + PyImport_ExecCodeModuleEx=python310.PyImport_ExecCodeModuleEx + PyImport_ExecCodeModuleObject=python310.PyImport_ExecCodeModuleObject + PyImport_ExecCodeModuleWithPathnames=python310.PyImport_ExecCodeModuleWithPathnames + PyImport_GetImporter=python310.PyImport_GetImporter + 
PyImport_GetMagicNumber=python310.PyImport_GetMagicNumber + PyImport_GetMagicTag=python310.PyImport_GetMagicTag + PyImport_GetModule=python310.PyImport_GetModule + PyImport_GetModuleDict=python310.PyImport_GetModuleDict + PyImport_Import=python310.PyImport_Import + PyImport_ImportFrozenModule=python310.PyImport_ImportFrozenModule + PyImport_ImportFrozenModuleObject=python310.PyImport_ImportFrozenModuleObject + PyImport_ImportModule=python310.PyImport_ImportModule + PyImport_ImportModuleLevel=python310.PyImport_ImportModuleLevel + PyImport_ImportModuleLevelObject=python310.PyImport_ImportModuleLevelObject + PyImport_ImportModuleNoBlock=python310.PyImport_ImportModuleNoBlock + PyImport_ReloadModule=python310.PyImport_ReloadModule + PyIndex_Check=python310.PyIndex_Check + PyInterpreterState_Clear=python310.PyInterpreterState_Clear + PyInterpreterState_Delete=python310.PyInterpreterState_Delete + PyInterpreterState_New=python310.PyInterpreterState_New + PyIter_Check=python310.PyIter_Check + PyIter_Next=python310.PyIter_Next + PyListIter_Type=python310.PyListIter_Type DATA + PyListRevIter_Type=python310.PyListRevIter_Type DATA + PyList_Append=python310.PyList_Append + PyList_AsTuple=python310.PyList_AsTuple + PyList_GetItem=python310.PyList_GetItem + PyList_GetSlice=python310.PyList_GetSlice + PyList_Insert=python310.PyList_Insert + PyList_New=python310.PyList_New + PyList_Reverse=python310.PyList_Reverse + PyList_SetItem=python310.PyList_SetItem + PyList_SetSlice=python310.PyList_SetSlice + PyList_Size=python310.PyList_Size + PyList_Sort=python310.PyList_Sort + PyList_Type=python310.PyList_Type DATA + PyLongRangeIter_Type=python310.PyLongRangeIter_Type DATA + PyLong_AsDouble=python310.PyLong_AsDouble + PyLong_AsLong=python310.PyLong_AsLong + PyLong_AsLongAndOverflow=python310.PyLong_AsLongAndOverflow + PyLong_AsLongLong=python310.PyLong_AsLongLong + PyLong_AsLongLongAndOverflow=python310.PyLong_AsLongLongAndOverflow + PyLong_AsSize_t=python310.PyLong_AsSize_t + 
PyLong_AsSsize_t=python310.PyLong_AsSsize_t + PyLong_AsUnsignedLong=python310.PyLong_AsUnsignedLong + PyLong_AsUnsignedLongLong=python310.PyLong_AsUnsignedLongLong + PyLong_AsUnsignedLongLongMask=python310.PyLong_AsUnsignedLongLongMask + PyLong_AsUnsignedLongMask=python310.PyLong_AsUnsignedLongMask + PyLong_AsVoidPtr=python310.PyLong_AsVoidPtr + PyLong_FromDouble=python310.PyLong_FromDouble + PyLong_FromLong=python310.PyLong_FromLong + PyLong_FromLongLong=python310.PyLong_FromLongLong + PyLong_FromSize_t=python310.PyLong_FromSize_t + PyLong_FromSsize_t=python310.PyLong_FromSsize_t + PyLong_FromString=python310.PyLong_FromString + PyLong_FromUnsignedLong=python310.PyLong_FromUnsignedLong + PyLong_FromUnsignedLongLong=python310.PyLong_FromUnsignedLongLong + PyLong_FromVoidPtr=python310.PyLong_FromVoidPtr + PyLong_GetInfo=python310.PyLong_GetInfo + PyLong_Type=python310.PyLong_Type DATA + PyMap_Type=python310.PyMap_Type DATA + PyMapping_Check=python310.PyMapping_Check + PyMapping_GetItemString=python310.PyMapping_GetItemString + PyMapping_HasKey=python310.PyMapping_HasKey + PyMapping_HasKeyString=python310.PyMapping_HasKeyString + PyMapping_Items=python310.PyMapping_Items + PyMapping_Keys=python310.PyMapping_Keys + PyMapping_Length=python310.PyMapping_Length + PyMapping_SetItemString=python310.PyMapping_SetItemString + PyMapping_Size=python310.PyMapping_Size + PyMapping_Values=python310.PyMapping_Values + PyMem_Calloc=python310.PyMem_Calloc + PyMem_Free=python310.PyMem_Free + PyMem_Malloc=python310.PyMem_Malloc + PyMem_Realloc=python310.PyMem_Realloc + PyMemberDescr_Type=python310.PyMemberDescr_Type DATA + PyMemoryView_FromMemory=python310.PyMemoryView_FromMemory + PyMemoryView_FromObject=python310.PyMemoryView_FromObject + PyMemoryView_GetContiguous=python310.PyMemoryView_GetContiguous + PyMemoryView_Type=python310.PyMemoryView_Type DATA + PyMethodDescr_Type=python310.PyMethodDescr_Type DATA + PyModuleDef_Init=python310.PyModuleDef_Init + 
PyModuleDef_Type=python310.PyModuleDef_Type DATA + PyModule_AddFunctions=python310.PyModule_AddFunctions + PyModule_AddIntConstant=python310.PyModule_AddIntConstant + PyModule_AddObject=python310.PyModule_AddObject + PyModule_AddStringConstant=python310.PyModule_AddStringConstant + PyModule_Create2=python310.PyModule_Create2 + PyModule_ExecDef=python310.PyModule_ExecDef + PyModule_FromDefAndSpec2=python310.PyModule_FromDefAndSpec2 + PyModule_GetDef=python310.PyModule_GetDef + PyModule_GetDict=python310.PyModule_GetDict + PyModule_GetFilename=python310.PyModule_GetFilename + PyModule_GetFilenameObject=python310.PyModule_GetFilenameObject + PyModule_GetName=python310.PyModule_GetName + PyModule_GetNameObject=python310.PyModule_GetNameObject + PyModule_GetState=python310.PyModule_GetState + PyModule_New=python310.PyModule_New + PyModule_NewObject=python310.PyModule_NewObject + PyModule_SetDocString=python310.PyModule_SetDocString + PyModule_Type=python310.PyModule_Type DATA + PyNullImporter_Type=python310.PyNullImporter_Type DATA + PyNumber_Absolute=python310.PyNumber_Absolute + PyNumber_Add=python310.PyNumber_Add + PyNumber_And=python310.PyNumber_And + PyNumber_AsSsize_t=python310.PyNumber_AsSsize_t + PyNumber_Check=python310.PyNumber_Check + PyNumber_Divmod=python310.PyNumber_Divmod + PyNumber_Float=python310.PyNumber_Float + PyNumber_FloorDivide=python310.PyNumber_FloorDivide + PyNumber_InPlaceAdd=python310.PyNumber_InPlaceAdd + PyNumber_InPlaceAnd=python310.PyNumber_InPlaceAnd + PyNumber_InPlaceFloorDivide=python310.PyNumber_InPlaceFloorDivide + PyNumber_InPlaceLshift=python310.PyNumber_InPlaceLshift + PyNumber_InPlaceMatrixMultiply=python310.PyNumber_InPlaceMatrixMultiply + PyNumber_InPlaceMultiply=python310.PyNumber_InPlaceMultiply + PyNumber_InPlaceOr=python310.PyNumber_InPlaceOr + PyNumber_InPlacePower=python310.PyNumber_InPlacePower + PyNumber_InPlaceRemainder=python310.PyNumber_InPlaceRemainder + PyNumber_InPlaceRshift=python310.PyNumber_InPlaceRshift + 
PyNumber_InPlaceSubtract=python310.PyNumber_InPlaceSubtract + PyNumber_InPlaceTrueDivide=python310.PyNumber_InPlaceTrueDivide + PyNumber_InPlaceXor=python310.PyNumber_InPlaceXor + PyNumber_Index=python310.PyNumber_Index + PyNumber_Invert=python310.PyNumber_Invert + PyNumber_Long=python310.PyNumber_Long + PyNumber_Lshift=python310.PyNumber_Lshift + PyNumber_MatrixMultiply=python310.PyNumber_MatrixMultiply + PyNumber_Multiply=python310.PyNumber_Multiply + PyNumber_Negative=python310.PyNumber_Negative + PyNumber_Or=python310.PyNumber_Or + PyNumber_Positive=python310.PyNumber_Positive + PyNumber_Power=python310.PyNumber_Power + PyNumber_Remainder=python310.PyNumber_Remainder + PyNumber_Rshift=python310.PyNumber_Rshift + PyNumber_Subtract=python310.PyNumber_Subtract + PyNumber_ToBase=python310.PyNumber_ToBase + PyNumber_TrueDivide=python310.PyNumber_TrueDivide + PyNumber_Xor=python310.PyNumber_Xor + PyODictItems_Type=python310.PyODictItems_Type DATA + PyODictIter_Type=python310.PyODictIter_Type DATA + PyODictKeys_Type=python310.PyODictKeys_Type DATA + PyODictValues_Type=python310.PyODictValues_Type DATA + PyODict_DelItem=python310.PyODict_DelItem + PyODict_New=python310.PyODict_New + PyODict_SetItem=python310.PyODict_SetItem + PyODict_Type=python310.PyODict_Type DATA + PyOS_AfterFork=python310.PyOS_AfterFork + PyOS_CheckStack=python310.PyOS_CheckStack + PyOS_FSPath=python310.PyOS_FSPath + PyOS_InitInterrupts=python310.PyOS_InitInterrupts + PyOS_InputHook=python310.PyOS_InputHook DATA + PyOS_InterruptOccurred=python310.PyOS_InterruptOccurred + PyOS_ReadlineFunctionPointer=python310.PyOS_ReadlineFunctionPointer DATA + PyOS_double_to_string=python310.PyOS_double_to_string + PyOS_getsig=python310.PyOS_getsig + PyOS_mystricmp=python310.PyOS_mystricmp + PyOS_mystrnicmp=python310.PyOS_mystrnicmp + PyOS_setsig=python310.PyOS_setsig + PyOS_snprintf=python310.PyOS_snprintf + PyOS_string_to_double=python310.PyOS_string_to_double + PyOS_strtol=python310.PyOS_strtol + 
PyOS_strtoul=python310.PyOS_strtoul + PyOS_vsnprintf=python310.PyOS_vsnprintf + PyObject_ASCII=python310.PyObject_ASCII + PyObject_AsCharBuffer=python310.PyObject_AsCharBuffer + PyObject_AsFileDescriptor=python310.PyObject_AsFileDescriptor + PyObject_AsReadBuffer=python310.PyObject_AsReadBuffer + PyObject_AsWriteBuffer=python310.PyObject_AsWriteBuffer + PyObject_Bytes=python310.PyObject_Bytes + PyObject_Call=python310.PyObject_Call + PyObject_CallFunction=python310.PyObject_CallFunction + PyObject_CallFunctionObjArgs=python310.PyObject_CallFunctionObjArgs + PyObject_CallMethod=python310.PyObject_CallMethod + PyObject_CallMethodObjArgs=python310.PyObject_CallMethodObjArgs + PyObject_CallObject=python310.PyObject_CallObject + PyObject_Calloc=python310.PyObject_Calloc + PyObject_CheckReadBuffer=python310.PyObject_CheckReadBuffer + PyObject_ClearWeakRefs=python310.PyObject_ClearWeakRefs + PyObject_DelItem=python310.PyObject_DelItem + PyObject_DelItemString=python310.PyObject_DelItemString + PyObject_Dir=python310.PyObject_Dir + PyObject_Format=python310.PyObject_Format + PyObject_Free=python310.PyObject_Free + PyObject_GC_Del=python310.PyObject_GC_Del + PyObject_GC_Track=python310.PyObject_GC_Track + PyObject_GC_UnTrack=python310.PyObject_GC_UnTrack + PyObject_GenericGetAttr=python310.PyObject_GenericGetAttr + PyObject_GenericSetAttr=python310.PyObject_GenericSetAttr + PyObject_GenericSetDict=python310.PyObject_GenericSetDict + PyObject_GetAttr=python310.PyObject_GetAttr + PyObject_GetAttrString=python310.PyObject_GetAttrString + PyObject_GetItem=python310.PyObject_GetItem + PyObject_GetIter=python310.PyObject_GetIter + PyObject_HasAttr=python310.PyObject_HasAttr + PyObject_HasAttrString=python310.PyObject_HasAttrString + PyObject_Hash=python310.PyObject_Hash + PyObject_HashNotImplemented=python310.PyObject_HashNotImplemented + PyObject_Init=python310.PyObject_Init + PyObject_InitVar=python310.PyObject_InitVar + PyObject_IsInstance=python310.PyObject_IsInstance + 
PyObject_IsSubclass=python310.PyObject_IsSubclass + PyObject_IsTrue=python310.PyObject_IsTrue + PyObject_Length=python310.PyObject_Length + PyObject_Malloc=python310.PyObject_Malloc + PyObject_Not=python310.PyObject_Not + PyObject_Realloc=python310.PyObject_Realloc + PyObject_Repr=python310.PyObject_Repr + PyObject_RichCompare=python310.PyObject_RichCompare + PyObject_RichCompareBool=python310.PyObject_RichCompareBool + PyObject_SelfIter=python310.PyObject_SelfIter + PyObject_SetAttr=python310.PyObject_SetAttr + PyObject_SetAttrString=python310.PyObject_SetAttrString + PyObject_SetItem=python310.PyObject_SetItem + PyObject_Size=python310.PyObject_Size + PyObject_Str=python310.PyObject_Str + PyObject_Type=python310.PyObject_Type + PyParser_SimpleParseFileFlags=python310.PyParser_SimpleParseFileFlags + PyParser_SimpleParseStringFlags=python310.PyParser_SimpleParseStringFlags + PyParser_SimpleParseStringFlagsFilename=python310.PyParser_SimpleParseStringFlagsFilename + PyProperty_Type=python310.PyProperty_Type DATA + PyRangeIter_Type=python310.PyRangeIter_Type DATA + PyRange_Type=python310.PyRange_Type DATA + PyReversed_Type=python310.PyReversed_Type DATA + PySeqIter_New=python310.PySeqIter_New + PySeqIter_Type=python310.PySeqIter_Type DATA + PySequence_Check=python310.PySequence_Check + PySequence_Concat=python310.PySequence_Concat + PySequence_Contains=python310.PySequence_Contains + PySequence_Count=python310.PySequence_Count + PySequence_DelItem=python310.PySequence_DelItem + PySequence_DelSlice=python310.PySequence_DelSlice + PySequence_Fast=python310.PySequence_Fast + PySequence_GetItem=python310.PySequence_GetItem + PySequence_GetSlice=python310.PySequence_GetSlice + PySequence_In=python310.PySequence_In + PySequence_InPlaceConcat=python310.PySequence_InPlaceConcat + PySequence_InPlaceRepeat=python310.PySequence_InPlaceRepeat + PySequence_Index=python310.PySequence_Index + PySequence_Length=python310.PySequence_Length + PySequence_List=python310.PySequence_List 
+ PySequence_Repeat=python310.PySequence_Repeat + PySequence_SetItem=python310.PySequence_SetItem + PySequence_SetSlice=python310.PySequence_SetSlice + PySequence_Size=python310.PySequence_Size + PySequence_Tuple=python310.PySequence_Tuple + PySetIter_Type=python310.PySetIter_Type DATA + PySet_Add=python310.PySet_Add + PySet_Clear=python310.PySet_Clear + PySet_Contains=python310.PySet_Contains + PySet_Discard=python310.PySet_Discard + PySet_New=python310.PySet_New + PySet_Pop=python310.PySet_Pop + PySet_Size=python310.PySet_Size + PySet_Type=python310.PySet_Type DATA + PySlice_AdjustIndices=python310.PySlice_AdjustIndices + PySlice_GetIndices=python310.PySlice_GetIndices + PySlice_GetIndicesEx=python310.PySlice_GetIndicesEx + PySlice_New=python310.PySlice_New + PySlice_Type=python310.PySlice_Type DATA + PySlice_Unpack=python310.PySlice_Unpack + PySortWrapper_Type=python310.PySortWrapper_Type DATA + PyInterpreterState_GetID=python310.PyInterpreterState_GetID + PyState_AddModule=python310.PyState_AddModule + PyState_FindModule=python310.PyState_FindModule + PyState_RemoveModule=python310.PyState_RemoveModule + PyStructSequence_GetItem=python310.PyStructSequence_GetItem + PyStructSequence_New=python310.PyStructSequence_New + PyStructSequence_NewType=python310.PyStructSequence_NewType + PyStructSequence_SetItem=python310.PyStructSequence_SetItem + PySuper_Type=python310.PySuper_Type DATA + PySys_AddWarnOption=python310.PySys_AddWarnOption + PySys_AddWarnOptionUnicode=python310.PySys_AddWarnOptionUnicode + PySys_AddXOption=python310.PySys_AddXOption + PySys_FormatStderr=python310.PySys_FormatStderr + PySys_FormatStdout=python310.PySys_FormatStdout + PySys_GetObject=python310.PySys_GetObject + PySys_GetXOptions=python310.PySys_GetXOptions + PySys_HasWarnOptions=python310.PySys_HasWarnOptions + PySys_ResetWarnOptions=python310.PySys_ResetWarnOptions + PySys_SetArgv=python310.PySys_SetArgv + PySys_SetArgvEx=python310.PySys_SetArgvEx + 
PySys_SetObject=python310.PySys_SetObject + PySys_SetPath=python310.PySys_SetPath + PySys_WriteStderr=python310.PySys_WriteStderr + PySys_WriteStdout=python310.PySys_WriteStdout + PyThreadState_Clear=python310.PyThreadState_Clear + PyThreadState_Delete=python310.PyThreadState_Delete + PyThreadState_DeleteCurrent=python310.PyThreadState_DeleteCurrent + PyThreadState_Get=python310.PyThreadState_Get + PyThreadState_GetDict=python310.PyThreadState_GetDict + PyThreadState_New=python310.PyThreadState_New + PyThreadState_SetAsyncExc=python310.PyThreadState_SetAsyncExc + PyThreadState_Swap=python310.PyThreadState_Swap + PyThread_tss_alloc=python310.PyThread_tss_alloc + PyThread_tss_create=python310.PyThread_tss_create + PyThread_tss_delete=python310.PyThread_tss_delete + PyThread_tss_free=python310.PyThread_tss_free + PyThread_tss_get=python310.PyThread_tss_get + PyThread_tss_is_created=python310.PyThread_tss_is_created + PyThread_tss_set=python310.PyThread_tss_set + PyTraceBack_Here=python310.PyTraceBack_Here + PyTraceBack_Print=python310.PyTraceBack_Print + PyTraceBack_Type=python310.PyTraceBack_Type DATA + PyTupleIter_Type=python310.PyTupleIter_Type DATA + PyTuple_GetItem=python310.PyTuple_GetItem + PyTuple_GetSlice=python310.PyTuple_GetSlice + PyTuple_New=python310.PyTuple_New + PyTuple_Pack=python310.PyTuple_Pack + PyTuple_SetItem=python310.PyTuple_SetItem + PyTuple_Size=python310.PyTuple_Size + PyTuple_Type=python310.PyTuple_Type DATA + PyType_ClearCache=python310.PyType_ClearCache + PyType_FromSpec=python310.PyType_FromSpec + PyType_FromSpecWithBases=python310.PyType_FromSpecWithBases + PyType_GenericAlloc=python310.PyType_GenericAlloc + PyType_GenericNew=python310.PyType_GenericNew + PyType_GetFlags=python310.PyType_GetFlags + PyType_GetSlot=python310.PyType_GetSlot + PyType_IsSubtype=python310.PyType_IsSubtype + PyType_Modified=python310.PyType_Modified + PyType_Ready=python310.PyType_Ready + PyType_Type=python310.PyType_Type DATA + 
PyUnicodeDecodeError_Create=python310.PyUnicodeDecodeError_Create + PyUnicodeDecodeError_GetEncoding=python310.PyUnicodeDecodeError_GetEncoding + PyUnicodeDecodeError_GetEnd=python310.PyUnicodeDecodeError_GetEnd + PyUnicodeDecodeError_GetObject=python310.PyUnicodeDecodeError_GetObject + PyUnicodeDecodeError_GetReason=python310.PyUnicodeDecodeError_GetReason + PyUnicodeDecodeError_GetStart=python310.PyUnicodeDecodeError_GetStart + PyUnicodeDecodeError_SetEnd=python310.PyUnicodeDecodeError_SetEnd + PyUnicodeDecodeError_SetReason=python310.PyUnicodeDecodeError_SetReason + PyUnicodeDecodeError_SetStart=python310.PyUnicodeDecodeError_SetStart + PyUnicodeEncodeError_GetEncoding=python310.PyUnicodeEncodeError_GetEncoding + PyUnicodeEncodeError_GetEnd=python310.PyUnicodeEncodeError_GetEnd + PyUnicodeEncodeError_GetObject=python310.PyUnicodeEncodeError_GetObject + PyUnicodeEncodeError_GetReason=python310.PyUnicodeEncodeError_GetReason + PyUnicodeEncodeError_GetStart=python310.PyUnicodeEncodeError_GetStart + PyUnicodeEncodeError_SetEnd=python310.PyUnicodeEncodeError_SetEnd + PyUnicodeEncodeError_SetReason=python310.PyUnicodeEncodeError_SetReason + PyUnicodeEncodeError_SetStart=python310.PyUnicodeEncodeError_SetStart + PyUnicodeIter_Type=python310.PyUnicodeIter_Type DATA + PyUnicodeTranslateError_GetEnd=python310.PyUnicodeTranslateError_GetEnd + PyUnicodeTranslateError_GetObject=python310.PyUnicodeTranslateError_GetObject + PyUnicodeTranslateError_GetReason=python310.PyUnicodeTranslateError_GetReason + PyUnicodeTranslateError_GetStart=python310.PyUnicodeTranslateError_GetStart + PyUnicodeTranslateError_SetEnd=python310.PyUnicodeTranslateError_SetEnd + PyUnicodeTranslateError_SetReason=python310.PyUnicodeTranslateError_SetReason + PyUnicodeTranslateError_SetStart=python310.PyUnicodeTranslateError_SetStart + PyUnicode_Append=python310.PyUnicode_Append + PyUnicode_AppendAndDel=python310.PyUnicode_AppendAndDel + PyUnicode_AsASCIIString=python310.PyUnicode_AsASCIIString + 
PyUnicode_AsCharmapString=python310.PyUnicode_AsCharmapString + PyUnicode_AsDecodedObject=python310.PyUnicode_AsDecodedObject + PyUnicode_AsDecodedUnicode=python310.PyUnicode_AsDecodedUnicode + PyUnicode_AsEncodedObject=python310.PyUnicode_AsEncodedObject + PyUnicode_AsEncodedString=python310.PyUnicode_AsEncodedString + PyUnicode_AsEncodedUnicode=python310.PyUnicode_AsEncodedUnicode + PyUnicode_AsLatin1String=python310.PyUnicode_AsLatin1String + PyUnicode_AsMBCSString=python310.PyUnicode_AsMBCSString + PyUnicode_AsRawUnicodeEscapeString=python310.PyUnicode_AsRawUnicodeEscapeString + PyUnicode_AsUCS4=python310.PyUnicode_AsUCS4 + PyUnicode_AsUCS4Copy=python310.PyUnicode_AsUCS4Copy + PyUnicode_AsUTF16String=python310.PyUnicode_AsUTF16String + PyUnicode_AsUTF32String=python310.PyUnicode_AsUTF32String + PyUnicode_AsUTF8String=python310.PyUnicode_AsUTF8String + PyUnicode_AsUnicodeEscapeString=python310.PyUnicode_AsUnicodeEscapeString + PyUnicode_AsWideChar=python310.PyUnicode_AsWideChar + PyUnicode_AsWideCharString=python310.PyUnicode_AsWideCharString + PyUnicode_BuildEncodingMap=python310.PyUnicode_BuildEncodingMap + PyUnicode_Compare=python310.PyUnicode_Compare + PyUnicode_CompareWithASCIIString=python310.PyUnicode_CompareWithASCIIString + PyUnicode_Concat=python310.PyUnicode_Concat + PyUnicode_Contains=python310.PyUnicode_Contains + PyUnicode_Count=python310.PyUnicode_Count + PyUnicode_Decode=python310.PyUnicode_Decode + PyUnicode_DecodeASCII=python310.PyUnicode_DecodeASCII + PyUnicode_DecodeCharmap=python310.PyUnicode_DecodeCharmap + PyUnicode_DecodeCodePageStateful=python310.PyUnicode_DecodeCodePageStateful + PyUnicode_DecodeFSDefault=python310.PyUnicode_DecodeFSDefault + PyUnicode_DecodeFSDefaultAndSize=python310.PyUnicode_DecodeFSDefaultAndSize + PyUnicode_DecodeLatin1=python310.PyUnicode_DecodeLatin1 + PyUnicode_DecodeLocale=python310.PyUnicode_DecodeLocale + PyUnicode_DecodeLocaleAndSize=python310.PyUnicode_DecodeLocaleAndSize + 
PyUnicode_DecodeMBCS=python310.PyUnicode_DecodeMBCS + PyUnicode_DecodeMBCSStateful=python310.PyUnicode_DecodeMBCSStateful + PyUnicode_DecodeRawUnicodeEscape=python310.PyUnicode_DecodeRawUnicodeEscape + PyUnicode_DecodeUTF16=python310.PyUnicode_DecodeUTF16 + PyUnicode_DecodeUTF16Stateful=python310.PyUnicode_DecodeUTF16Stateful + PyUnicode_DecodeUTF32=python310.PyUnicode_DecodeUTF32 + PyUnicode_DecodeUTF32Stateful=python310.PyUnicode_DecodeUTF32Stateful + PyUnicode_DecodeUTF7=python310.PyUnicode_DecodeUTF7 + PyUnicode_DecodeUTF7Stateful=python310.PyUnicode_DecodeUTF7Stateful + PyUnicode_DecodeUTF8=python310.PyUnicode_DecodeUTF8 + PyUnicode_DecodeUTF8Stateful=python310.PyUnicode_DecodeUTF8Stateful + PyUnicode_DecodeUnicodeEscape=python310.PyUnicode_DecodeUnicodeEscape + PyUnicode_EncodeCodePage=python310.PyUnicode_EncodeCodePage + PyUnicode_EncodeFSDefault=python310.PyUnicode_EncodeFSDefault + PyUnicode_EncodeLocale=python310.PyUnicode_EncodeLocale + PyUnicode_FSConverter=python310.PyUnicode_FSConverter + PyUnicode_FSDecoder=python310.PyUnicode_FSDecoder + PyUnicode_Find=python310.PyUnicode_Find + PyUnicode_FindChar=python310.PyUnicode_FindChar + PyUnicode_Format=python310.PyUnicode_Format + PyUnicode_FromEncodedObject=python310.PyUnicode_FromEncodedObject + PyUnicode_FromFormat=python310.PyUnicode_FromFormat + PyUnicode_FromFormatV=python310.PyUnicode_FromFormatV + PyUnicode_FromObject=python310.PyUnicode_FromObject + PyUnicode_FromOrdinal=python310.PyUnicode_FromOrdinal + PyUnicode_FromString=python310.PyUnicode_FromString + PyUnicode_FromStringAndSize=python310.PyUnicode_FromStringAndSize + PyUnicode_FromWideChar=python310.PyUnicode_FromWideChar + PyUnicode_GetDefaultEncoding=python310.PyUnicode_GetDefaultEncoding + PyUnicode_GetLength=python310.PyUnicode_GetLength + PyUnicode_GetSize=python310.PyUnicode_GetSize + PyUnicode_InternFromString=python310.PyUnicode_InternFromString + PyUnicode_InternImmortal=python310.PyUnicode_InternImmortal + 
PyUnicode_InternInPlace=python310.PyUnicode_InternInPlace + PyUnicode_IsIdentifier=python310.PyUnicode_IsIdentifier + PyUnicode_Join=python310.PyUnicode_Join + PyUnicode_Partition=python310.PyUnicode_Partition + PyUnicode_RPartition=python310.PyUnicode_RPartition + PyUnicode_RSplit=python310.PyUnicode_RSplit + PyUnicode_ReadChar=python310.PyUnicode_ReadChar + PyUnicode_Replace=python310.PyUnicode_Replace + PyUnicode_Resize=python310.PyUnicode_Resize + PyUnicode_RichCompare=python310.PyUnicode_RichCompare + PyUnicode_Split=python310.PyUnicode_Split + PyUnicode_Splitlines=python310.PyUnicode_Splitlines + PyUnicode_Substring=python310.PyUnicode_Substring + PyUnicode_Tailmatch=python310.PyUnicode_Tailmatch + PyUnicode_Translate=python310.PyUnicode_Translate + PyUnicode_Type=python310.PyUnicode_Type DATA + PyUnicode_WriteChar=python310.PyUnicode_WriteChar + PyWeakref_GetObject=python310.PyWeakref_GetObject + PyWeakref_NewProxy=python310.PyWeakref_NewProxy + PyWeakref_NewRef=python310.PyWeakref_NewRef + PyWrapperDescr_Type=python310.PyWrapperDescr_Type DATA + PyWrapper_New=python310.PyWrapper_New + PyZip_Type=python310.PyZip_Type DATA + Py_AddPendingCall=python310.Py_AddPendingCall + Py_AtExit=python310.Py_AtExit + Py_BuildValue=python310.Py_BuildValue + Py_CompileString=python310.Py_CompileString + Py_DecRef=python310.Py_DecRef + Py_DecodeLocale=python310.Py_DecodeLocale + Py_EncodeLocale=python310.Py_EncodeLocale + Py_EndInterpreter=python310.Py_EndInterpreter + Py_EnterRecursiveCall=python310.Py_EnterRecursiveCall + Py_Exit=python310.Py_Exit + Py_FatalError=python310.Py_FatalError + Py_FileSystemDefaultEncodeErrors=python310.Py_FileSystemDefaultEncodeErrors DATA + Py_FileSystemDefaultEncoding=python310.Py_FileSystemDefaultEncoding DATA + Py_Finalize=python310.Py_Finalize + Py_FinalizeEx=python310.Py_FinalizeEx + Py_GenericAlias=python310.Py_GenericAlias + Py_GenericAliasType=python310.Py_GenericAliasType + Py_GetBuildInfo=python310.Py_GetBuildInfo + 
Py_GetCompiler=python310.Py_GetCompiler + Py_GetCopyright=python310.Py_GetCopyright + Py_GetExecPrefix=python310.Py_GetExecPrefix + Py_GetPath=python310.Py_GetPath + Py_GetPlatform=python310.Py_GetPlatform + Py_GetPrefix=python310.Py_GetPrefix + Py_GetProgramFullPath=python310.Py_GetProgramFullPath + Py_GetProgramName=python310.Py_GetProgramName + Py_GetPythonHome=python310.Py_GetPythonHome + Py_GetRecursionLimit=python310.Py_GetRecursionLimit + Py_GetVersion=python310.Py_GetVersion + Py_HasFileSystemDefaultEncoding=python310.Py_HasFileSystemDefaultEncoding DATA + Py_IncRef=python310.Py_IncRef + Py_Initialize=python310.Py_Initialize + Py_InitializeEx=python310.Py_InitializeEx + Py_IsInitialized=python310.Py_IsInitialized + Py_LeaveRecursiveCall=python310.Py_LeaveRecursiveCall + Py_Main=python310.Py_Main + Py_MakePendingCalls=python310.Py_MakePendingCalls + Py_NewInterpreter=python310.Py_NewInterpreter + Py_ReprEnter=python310.Py_ReprEnter + Py_ReprLeave=python310.Py_ReprLeave + Py_SetPath=python310.Py_SetPath + Py_SetProgramName=python310.Py_SetProgramName + Py_SetPythonHome=python310.Py_SetPythonHome + Py_SetRecursionLimit=python310.Py_SetRecursionLimit + Py_SymtableString=python310.Py_SymtableString + Py_UTF8Mode=python310.Py_UTF8Mode DATA + Py_VaBuildValue=python310.Py_VaBuildValue + _PyArg_ParseTupleAndKeywords_SizeT=python310._PyArg_ParseTupleAndKeywords_SizeT + _PyArg_ParseTuple_SizeT=python310._PyArg_ParseTuple_SizeT + _PyArg_Parse_SizeT=python310._PyArg_Parse_SizeT + _PyArg_VaParseTupleAndKeywords_SizeT=python310._PyArg_VaParseTupleAndKeywords_SizeT + _PyArg_VaParse_SizeT=python310._PyArg_VaParse_SizeT + _PyErr_BadInternalCall=python310._PyErr_BadInternalCall + _PyObject_CallFunction_SizeT=python310._PyObject_CallFunction_SizeT + _PyObject_CallMethod_SizeT=python310._PyObject_CallMethod_SizeT + _PyObject_GC_Malloc=python310._PyObject_GC_Malloc + _PyObject_GC_New=python310._PyObject_GC_New + _PyObject_GC_NewVar=python310._PyObject_GC_NewVar + 
_PyObject_GC_Resize=python310._PyObject_GC_Resize + _PyObject_New=python310._PyObject_New + _PyObject_NewVar=python310._PyObject_NewVar + _PyState_AddModule=python310._PyState_AddModule + _PyThreadState_Init=python310._PyThreadState_Init + _PyThreadState_Prealloc=python310._PyThreadState_Prealloc + _PyTrash_delete_later=python310._PyTrash_delete_later DATA + _PyTrash_delete_nesting=python310._PyTrash_delete_nesting DATA + _PyTrash_deposit_object=python310._PyTrash_deposit_object + _PyTrash_destroy_chain=python310._PyTrash_destroy_chain + _PyTrash_thread_deposit_object=python310._PyTrash_thread_deposit_object + _PyTrash_thread_destroy_chain=python310._PyTrash_thread_destroy_chain + _PyWeakref_CallableProxyType=python310._PyWeakref_CallableProxyType DATA + _PyWeakref_ProxyType=python310._PyWeakref_ProxyType DATA + _PyWeakref_RefType=python310._PyWeakref_RefType DATA + _Py_BuildValue_SizeT=python310._Py_BuildValue_SizeT + _Py_CheckRecursionLimit=python310._Py_CheckRecursionLimit DATA + _Py_CheckRecursiveCall=python310._Py_CheckRecursiveCall + _Py_Dealloc=python310._Py_Dealloc + _Py_EllipsisObject=python310._Py_EllipsisObject DATA + _Py_FalseStruct=python310._Py_FalseStruct DATA + _Py_NoneStruct=python310._Py_NoneStruct DATA + _Py_NotImplementedStruct=python310._Py_NotImplementedStruct DATA + _Py_SwappedOp=python310._Py_SwappedOp DATA + _Py_TrueStruct=python310._Py_TrueStruct DATA + _Py_VaBuildValue_SizeT=python310._Py_VaBuildValue_SizeT diff --git a/PCbuild/readme.txt b/PCbuild/readme.txt index c44910e9bfcf0..4d4a706b8e4b9 100644 --- a/PCbuild/readme.txt +++ b/PCbuild/readme.txt @@ -38,7 +38,7 @@ Debug Used to build Python with extra debugging capabilities, equivalent to using ./configure --with-pydebug on UNIX. All binaries built using this configuration have "_d" added to their name: - python39_d.dll, python_d.exe, parser_d.pyd, and so on. Both the + python310_d.dll, python_d.exe, parser_d.pyd, and so on. 
Both the build and rt (run test) batch files in this directory accept a -d option for debug builds. If you are building Python to help with development of CPython, you will most likely use this configuration. diff --git a/README.rst b/README.rst index 82303953ecda5..96c9561e7c8eb 100644 --- a/README.rst +++ b/README.rst @@ -1,5 +1,5 @@ -This is Python version 3.9.0 alpha 6 -==================================== +This is Python version 3.10.0 alpha 0 +===================================== .. image:: https://travis-ci.org/python/cpython.svg?branch=master :alt: CPython build status on Travis CI @@ -141,7 +141,7 @@ What's New ---------- We have a comprehensive overview of the changes in the `What's New in Python -3.9 `_ document. For a more +3.10 `_ document. For a more detailed change log, read `Misc/NEWS `_, but a full accounting of changes can only be gleaned from the `commit history @@ -154,7 +154,7 @@ entitled "Installing multiple versions". Documentation ------------- -`Documentation for Python 3.9 `_ is online, +`Documentation for Python 3.10 `_ is online, updated daily. It can also be downloaded in many formats for faster access. The documentation @@ -213,8 +213,8 @@ intend to install multiple versions using the same prefix you must decide which version (if any) is your "primary" version. Install that version using ``make install``. Install all other versions using ``make altinstall``. -For example, if you want to install Python 2.7, 3.6, and 3.9 with 3.9 being the -primary version, you would execute ``make install`` in your 3.9 build directory +For example, if you want to install Python 2.7, 3.6, and 3.10 with 3.10 being the +primary version, you would execute ``make install`` in your 3.10 build directory and ``make altinstall`` in the others. diff --git a/configure b/configure index 56d66d0235289..1124412dce475 100755 --- a/configure +++ b/configure @@ -1,6 +1,6 @@ #! /bin/sh # Guess values for system-dependent variables and create Makefiles. 
-# Generated by GNU Autoconf 2.69 for python 3.9. +# Generated by GNU Autoconf 2.69 for python 3.10. # # Report bugs to . # @@ -580,8 +580,8 @@ MAKEFLAGS= # Identity of this package. PACKAGE_NAME='python' PACKAGE_TARNAME='python' -PACKAGE_VERSION='3.9' -PACKAGE_STRING='python 3.9' +PACKAGE_VERSION='3.10' +PACKAGE_STRING='python 3.10' PACKAGE_BUGREPORT='https://bugs.python.org/' PACKAGE_URL='' @@ -1404,7 +1404,7 @@ if test "$ac_init_help" = "long"; then # Omit some internal or obsolete options to make the list less imposing. # This message is too long to be a string in the A/UX 3.1 sh. cat <<_ACEOF -\`configure' configures python 3.9 to adapt to many kinds of systems. +\`configure' configures python 3.10 to adapt to many kinds of systems. Usage: $0 [OPTION]... [VAR=VALUE]... @@ -1469,7 +1469,7 @@ fi if test -n "$ac_init_help"; then case $ac_init_help in - short | recursive ) echo "Configuration of python 3.9:";; + short | recursive ) echo "Configuration of python 3.10:";; esac cat <<\_ACEOF @@ -1673,7 +1673,7 @@ fi test -n "$ac_init_help" && exit $ac_status if $ac_init_version; then cat <<\_ACEOF -python configure 3.9 +python configure 3.10 generated by GNU Autoconf 2.69 Copyright (C) 2012 Free Software Foundation, Inc. @@ -2382,7 +2382,7 @@ cat >config.log <<_ACEOF This file contains any messages produced by compilers while running configure, to aid debugging if configure makes a mistake. -It was created by python $as_me 3.9, which was +It was created by python $as_me 3.10, which was generated by GNU Autoconf 2.69. Invocation command line was $ $0 $@ @@ -3002,7 +3002,7 @@ rm confdefs.h mv confdefs.h.new confdefs.h -VERSION=3.9 +VERSION=3.10 # Version number of Python's own shared library file. @@ -18118,7 +18118,7 @@ cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # report actual input values of CONFIG_FILES etc. instead of their # values after options handling. 
ac_log=" -This file was extended by python $as_me 3.9, which was +This file was extended by python $as_me 3.10, which was generated by GNU Autoconf 2.69. Invocation command line was CONFIG_FILES = $CONFIG_FILES @@ -18180,7 +18180,7 @@ _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`" ac_cs_version="\\ -python config.status 3.9 +python config.status 3.10 configured by $0, generated by GNU Autoconf 2.69, with options \\"\$ac_cs_config\\" diff --git a/configure.ac b/configure.ac index 497d7c191d537..84d1f00983f89 100644 --- a/configure.ac +++ b/configure.ac @@ -3,7 +3,7 @@ dnl * Please run autoreconf to test your changes! * dnl *********************************************** # Set VERSION so we only need to edit in one place (i.e., here) -m4_define(PYTHON_VERSION, 3.9) +m4_define(PYTHON_VERSION, 3.10) AC_PREREQ([2.69]) From webhook-mailer at python.org Mon May 18 23:03:36 2020 From: webhook-mailer at python.org (Kyle Stanley) Date: Tue, 19 May 2020 03:03:36 -0000 Subject: [Python-checkins] bpo-32309: Implement asyncio.to_thread() (GH-20143) Message-ID: https://github.com/python/cpython/commit/cc2bbc2227c3f5ed9d8f6b3bd052e6f9e68279d2 commit: cc2bbc2227c3f5ed9d8f6b3bd052e6f9e68279d2 branch: master author: Kyle Stanley committer: GitHub date: 2020-05-18T20:03:28-07:00 summary: bpo-32309: Implement asyncio.to_thread() (GH-20143) Implements `asyncio.to_thread`, a coroutine for asynchronously running IO-bound functions in a separate thread without blocking the event loop. See the discussion starting from [here](https://github.com/python/cpython/pull/18410#issuecomment-628930973) in GH-18410 for context. 
Automerge-Triggered-By: @aeros files: A Lib/asyncio/threads.py A Lib/test/test_asyncio/test_threads.py A Misc/NEWS.d/next/Library/2020-05-17-02-03-09.bpo-32309.KM9psl.rst M Doc/library/asyncio-api-index.rst M Doc/library/asyncio-task.rst M Doc/whatsnew/3.9.rst M Lib/asyncio/__init__.py diff --git a/Doc/library/asyncio-api-index.rst b/Doc/library/asyncio-api-index.rst index d5b5659abc65e..047e5bbc58cca 100644 --- a/Doc/library/asyncio-api-index.rst +++ b/Doc/library/asyncio-api-index.rst @@ -48,6 +48,9 @@ await on multiple things with timeouts. * - :class:`Task` - Task object. + * - :func:`to_thread` + - Asynchronously run a function in a separate OS thread. + + * - :func:`run_coroutine_threadsafe` - Schedule a coroutine from another OS thread. diff --git a/Doc/library/asyncio-task.rst b/Doc/library/asyncio-task.rst index 2e963398d9300..7c2704090551b 100644 --- a/Doc/library/asyncio-task.rst +++ b/Doc/library/asyncio-task.rst @@ -602,6 +602,62 @@ Waiting Primitives # ... +Running in Threads +================== + +.. coroutinefunction:: to_thread(func, /, \*args, \*\*kwargs) + + Asynchronously run function *func* in a separate thread. + + Any \*args and \*\*kwargs supplied for this function are directly passed + to *func*. + + Return an :class:`asyncio.Future` which represents the eventual result of + *func*. + + This coroutine function is primarily intended to be used for executing + IO-bound functions/methods that would otherwise block the event loop if + they were run in the main thread. For example:: + + def blocking_io(): + print(f"start blocking_io at {time.strftime('%X')}") + # Note that time.sleep() can be replaced with any blocking + # IO-bound operation, such as file operations. 
+ time.sleep(1) + print(f"blocking_io complete at {time.strftime('%X')}") + + async def main(): + print(f"started main at {time.strftime('%X')}") + + await asyncio.gather( + asyncio.to_thread(blocking_io), + asyncio.sleep(1)) + + print(f"finished main at {time.strftime('%X')}") + + + asyncio.run(main()) + + # Expected output: + # + # started main at 19:50:53 + # start blocking_io at 19:50:53 + # blocking_io complete at 19:50:54 + # finished main at 19:50:54 + + Directly calling `blocking_io()` in any coroutine would block the event loop + for its duration, resulting in an additional 1 second of run time. Instead, + by using `asyncio.to_thread()`, we can run it in a separate thread without + blocking the event loop. + + .. note:: + + Due to the :term:`GIL`, `asyncio.to_thread()` can typically only be used + to make IO-bound functions non-blocking. However, for extension modules + that release the GIL or alternative Python implementations that don't + have one, `asyncio.to_thread()` can also be used for CPU-bound functions. + + Scheduling From Other Threads ============================= diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index 593f523828703..037e1055c79e5 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -282,6 +282,12 @@ that schedules a shutdown for the default executor that waits on the Added :class:`asyncio.PidfdChildWatcher`, a Linux-specific child watcher implementation that polls process file descriptors. (:issue:`38692`) +Added a new :term:`coroutine` :func:`asyncio.to_thread`. It is mainly used for +running IO-bound functions in a separate thread to avoid blocking the event +loop, and essentially works as a high-level version of +:meth:`~asyncio.loop.run_in_executor` that can directly take keyword arguments. +(Contributed by Kyle Stanley and Yury Selivanov in :issue:`32309`.) 
+ compileall ---------- diff --git a/Lib/asyncio/__init__.py b/Lib/asyncio/__init__.py index 28c2e2c429f34..eb84bfb189ccf 100644 --- a/Lib/asyncio/__init__.py +++ b/Lib/asyncio/__init__.py @@ -17,6 +17,7 @@ from .streams import * from .subprocess import * from .tasks import * +from .threads import * from .transports import * # Exposed for _asynciomodule.c to implement now deprecated @@ -35,6 +36,7 @@ streams.__all__ + subprocess.__all__ + tasks.__all__ + + threads.__all__ + transports.__all__) if sys.platform == 'win32': # pragma: no cover diff --git a/Lib/asyncio/threads.py b/Lib/asyncio/threads.py new file mode 100644 index 0000000000000..2f40467fe5bc7 --- /dev/null +++ b/Lib/asyncio/threads.py @@ -0,0 +1,21 @@ +"""High-level support for working with threads in asyncio""" + +import functools + +from . import events + + +__all__ = "to_thread", + + +async def to_thread(func, /, *args, **kwargs): + """Asynchronously run function *func* in a separate thread. + + Any *args and **kwargs supplied for this function are directly passed + to *func*. + + Return an asyncio.Future which represents the eventual result of *func*. 
+ """ + loop = events.get_running_loop() + func_call = functools.partial(func, *args, **kwargs) + return await loop.run_in_executor(None, func_call) diff --git a/Lib/test/test_asyncio/test_threads.py b/Lib/test/test_asyncio/test_threads.py new file mode 100644 index 0000000000000..99a00f21832f3 --- /dev/null +++ b/Lib/test/test_asyncio/test_threads.py @@ -0,0 +1,79 @@ +"""Tests for asyncio/threads.py""" + +import asyncio +import unittest + +from unittest import mock +from test.test_asyncio import utils as test_utils + + +def tearDownModule(): + asyncio.set_event_loop_policy(None) + + +class ToThreadTests(test_utils.TestCase): + def setUp(self): + super().setUp() + self.loop = asyncio.new_event_loop() + asyncio.set_event_loop(self.loop) + + def tearDown(self): + self.loop.run_until_complete( + self.loop.shutdown_default_executor()) + self.loop.close() + asyncio.set_event_loop(None) + self.loop = None + super().tearDown() + + def test_to_thread(self): + async def main(): + return await asyncio.to_thread(sum, [40, 2]) + + result = self.loop.run_until_complete(main()) + self.assertEqual(result, 42) + + def test_to_thread_exception(self): + def raise_runtime(): + raise RuntimeError("test") + + async def main(): + await asyncio.to_thread(raise_runtime) + + with self.assertRaisesRegex(RuntimeError, "test"): + self.loop.run_until_complete(main()) + + def test_to_thread_once(self): + func = mock.Mock() + + async def main(): + await asyncio.to_thread(func) + + self.loop.run_until_complete(main()) + func.assert_called_once() + + def test_to_thread_concurrent(self): + func = mock.Mock() + + async def main(): + futs = [] + for _ in range(10): + fut = asyncio.to_thread(func) + futs.append(fut) + await asyncio.gather(*futs) + + self.loop.run_until_complete(main()) + self.assertEqual(func.call_count, 10) + + def test_to_thread_args_kwargs(self): + # Unlike run_in_executor(), to_thread() should directly accept kwargs. 
+ func = mock.Mock() + + async def main(): + await asyncio.to_thread(func, 'test', something=True) + + self.loop.run_until_complete(main()) + func.assert_called_once_with('test', something=True) + + +if __name__ == "__main__": + unittest.main() diff --git a/Misc/NEWS.d/next/Library/2020-05-17-02-03-09.bpo-32309.KM9psl.rst b/Misc/NEWS.d/next/Library/2020-05-17-02-03-09.bpo-32309.KM9psl.rst new file mode 100644 index 0000000000000..6272c35edf4d5 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-17-02-03-09.bpo-32309.KM9psl.rst @@ -0,0 +1,4 @@ +Added a new :term:`coroutine` :func:`asyncio.to_thread`. It is mainly used for +running IO-bound functions in a separate thread to avoid blocking the event +loop, and essentially works as a high-level version of +:meth:`~asyncio.loop.run_in_executor` that can directly take keyword arguments. \ No newline at end of file From webhook-mailer at python.org Tue May 19 05:28:04 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 19 May 2020 09:28:04 -0000 Subject: [Python-checkins] bpo-39976: Add **other_popen_kwargs to subprocess docs (GH-20145) Message-ID: https://github.com/python/cpython/commit/257e11cebde6b29177a206abd1e395367799ed42 commit: 257e11cebde6b29177a206abd1e395367799ed42 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-19T02:27:49-07:00 summary: bpo-39976: Add **other_popen_kwargs to subprocess docs (GH-20145) (cherry picked from commit 46545000c2a30b46aed717b546bc09e5bae7148f) Co-authored-by: Zackery Spytz files: M Doc/library/subprocess.rst diff --git a/Doc/library/subprocess.rst b/Doc/library/subprocess.rst index 0b692b4dec6c9..a0152a8009179 100644 --- a/Doc/library/subprocess.rst +++ b/Doc/library/subprocess.rst @@ -40,7 +40,7 @@ compatibility with older versions, see the :ref:`call-function-trio` section. .. 
function:: run(args, *, stdin=None, input=None, stdout=None, stderr=None,\ capture_output=False, shell=False, cwd=None, timeout=None, \ check=False, encoding=None, errors=None, text=None, env=None, \ - universal_newlines=None) + universal_newlines=None, **other_popen_kwargs) Run the command described by *args*. Wait for command to complete, then return a :class:`CompletedProcess` instance. @@ -1049,7 +1049,8 @@ Prior to Python 3.5, these three functions comprised the high level API to subprocess. You can now use :func:`run` in many cases, but lots of existing code calls these functions. -.. function:: call(args, *, stdin=None, stdout=None, stderr=None, shell=False, cwd=None, timeout=None) +.. function:: call(args, *, stdin=None, stdout=None, stderr=None, \ + shell=False, cwd=None, timeout=None, **other_popen_kwargs) Run the command described by *args*. Wait for command to complete, then return the :attr:`~Popen.returncode` attribute. @@ -1075,7 +1076,9 @@ calls these functions. .. versionchanged:: 3.3 *timeout* was added. -.. function:: check_call(args, *, stdin=None, stdout=None, stderr=None, shell=False, cwd=None, timeout=None) +.. function:: check_call(args, *, stdin=None, stdout=None, stderr=None, \ + shell=False, cwd=None, timeout=None, \ + **other_popen_kwargs) Run command with arguments. Wait for command to complete. If the return code was zero then return, otherwise raise :exc:`CalledProcessError`. The @@ -1106,7 +1109,8 @@ calls these functions. .. function:: check_output(args, *, stdin=None, stderr=None, shell=False, \ cwd=None, encoding=None, errors=None, \ - universal_newlines=None, timeout=None, text=None) + universal_newlines=None, timeout=None, text=None, \ + **other_popen_kwargs) Run command with arguments and return its output. 
From webhook-mailer at python.org Tue May 19 05:28:26 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 19 May 2020 09:28:26 -0000 Subject: [Python-checkins] bpo-39976: Add **other_popen_kwargs to subprocess docs (GH-20145) Message-ID: https://github.com/python/cpython/commit/05525fff8a46f4d479cc029e4ea57b35b153f015 commit: 05525fff8a46f4d479cc029e4ea57b35b153f015 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-19T02:28:22-07:00 summary: bpo-39976: Add **other_popen_kwargs to subprocess docs (GH-20145) (cherry picked from commit 46545000c2a30b46aed717b546bc09e5bae7148f) Co-authored-by: Zackery Spytz files: M Doc/library/subprocess.rst diff --git a/Doc/library/subprocess.rst b/Doc/library/subprocess.rst index 9b5b4565c76ab..9f2a05662860e 100644 --- a/Doc/library/subprocess.rst +++ b/Doc/library/subprocess.rst @@ -40,7 +40,7 @@ compatibility with older versions, see the :ref:`call-function-trio` section. .. function:: run(args, *, stdin=None, input=None, stdout=None, stderr=None,\ capture_output=False, shell=False, cwd=None, timeout=None, \ check=False, encoding=None, errors=None, text=None, env=None, \ - universal_newlines=None) + universal_newlines=None, **other_popen_kwargs) Run the command described by *args*. Wait for command to complete, then return a :class:`CompletedProcess` instance. @@ -1005,7 +1005,8 @@ Prior to Python 3.5, these three functions comprised the high level API to subprocess. You can now use :func:`run` in many cases, but lots of existing code calls these functions. -.. function:: call(args, *, stdin=None, stdout=None, stderr=None, shell=False, cwd=None, timeout=None) +.. function:: call(args, *, stdin=None, stdout=None, stderr=None, \ + shell=False, cwd=None, timeout=None, **other_popen_kwargs) Run the command described by *args*. Wait for command to complete, then return the :attr:`~Popen.returncode` attribute. 
@@ -1031,7 +1032,9 @@ calls these functions. .. versionchanged:: 3.3 *timeout* was added. -.. function:: check_call(args, *, stdin=None, stdout=None, stderr=None, shell=False, cwd=None, timeout=None) +.. function:: check_call(args, *, stdin=None, stdout=None, stderr=None, \ + shell=False, cwd=None, timeout=None, \ + **other_popen_kwargs) Run command with arguments. Wait for command to complete. If the return code was zero then return, otherwise raise :exc:`CalledProcessError`. The @@ -1062,7 +1065,8 @@ calls these functions. .. function:: check_output(args, *, stdin=None, stderr=None, shell=False, \ cwd=None, encoding=None, errors=None, \ - universal_newlines=None, timeout=None, text=None) + universal_newlines=None, timeout=None, text=None, \ + **other_popen_kwargs) Run command with arguments and return its output. From webhook-mailer at python.org Tue May 19 07:39:33 2020 From: webhook-mailer at python.org (Ned Deily) Date: Tue, 19 May 2020 11:39:33 -0000 Subject: [Python-checkins] 3.10 whatsnew needs to use blurb-produced changelog (GH-20213) Message-ID: https://github.com/python/cpython/commit/29251b787be93d47d06f58a9da65dd29f97fa632 commit: 29251b787be93d47d06f58a9da65dd29f97fa632 branch: master author: Ned Deily committer: GitHub date: 2020-05-19T07:39:29-04:00 summary: 3.10 whatsnew needs to use blurb-produced changelog (GH-20213) files: M Doc/whatsnew/3.10.rst diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst index 96cb132811109..547778599ef61 100644 --- a/Doc/whatsnew/3.10.rst +++ b/Doc/whatsnew/3.10.rst @@ -47,7 +47,7 @@ This article explains the new features in Python 3.10, compared to 3.9. -For full details, see the :source:`Misc/NEWS` file. +For full details, see the :ref:`changelog `. .. 
note:: From webhook-mailer at python.org Tue May 19 08:20:44 2020 From: webhook-mailer at python.org (Zackery Spytz) Date: Tue, 19 May 2020 12:20:44 -0000 Subject: [Python-checkins] bpo-39631: Fix file association MIME type in the Windows installer (GH-20205) Message-ID: https://github.com/python/cpython/commit/8c862e51248c5ebfec787badec88eb58c9267e1e commit: 8c862e51248c5ebfec787badec88eb58c9267e1e branch: master author: Zackery Spytz committer: GitHub date: 2020-05-19T13:20:39+01:00 summary: bpo-39631: Fix file association MIME type in the Windows installer (GH-20205) Use text/x-python instead of text/plain to avoid issues with tools assuming that "ShellExecute(script)" is a non-executable operation. files: M Tools/msi/launcher/launcher_reg.wxs diff --git a/Tools/msi/launcher/launcher_reg.wxs b/Tools/msi/launcher/launcher_reg.wxs index dace97ee58bb7..e8d9d24d43fa8 100644 --- a/Tools/msi/launcher/launcher_reg.wxs +++ b/Tools/msi/launcher/launcher_reg.wxs @@ -6,14 +6,14 @@ - + - + From webhook-mailer at python.org Tue May 19 08:22:20 2020 From: webhook-mailer at python.org (Minmin Gong) Date: Tue, 19 May 2020 12:22:20 -0000 Subject: [Python-checkins] bpo-40677: Define IO_REPARSE_TAG_APPEXECLINK explicitly (GH-20206) Message-ID: https://github.com/python/cpython/commit/711f9e180a48baba62301735b7f1a58ef0d0e93a commit: 711f9e180a48baba62301735b7f1a58ef0d0e93a branch: master author: Minmin Gong committer: GitHub date: 2020-05-19T13:22:16+01:00 summary: bpo-40677: Define IO_REPARSE_TAG_APPEXECLINK explicitly (GH-20206) This allows building with older versions of the Windows SDK where the value is not defined. 
files: A Misc/NEWS.d/next/Windows/2020-05-19-04-11-12.bpo-40677.qQbLW8.rst M Modules/_stat.c diff --git a/Misc/NEWS.d/next/Windows/2020-05-19-04-11-12.bpo-40677.qQbLW8.rst b/Misc/NEWS.d/next/Windows/2020-05-19-04-11-12.bpo-40677.qQbLW8.rst new file mode 100644 index 0000000000000..a09cb243aba31 --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2020-05-19-04-11-12.bpo-40677.qQbLW8.rst @@ -0,0 +1 @@ +Manually define IO_REPARSE_TAG_APPEXECLINK in case some old Windows SDK doesn't have it. \ No newline at end of file diff --git a/Modules/_stat.c b/Modules/_stat.c index c7090c02688de..546e6a5f94ca1 100644 --- a/Modules/_stat.c +++ b/Modules/_stat.c @@ -40,6 +40,10 @@ typedef unsigned short mode_t; # define FILE_ATTRIBUTE_NO_SCRUB_DATA 0x20000 #endif +#ifndef IO_REPARSE_TAG_APPEXECLINK +# define IO_REPARSE_TAG_APPEXECLINK 0x8000001BL +#endif + #endif /* MS_WINDOWS */ /* From Python's stat.py */ From webhook-mailer at python.org Tue May 19 08:26:47 2020 From: webhook-mailer at python.org (Julien Palard) Date: Tue, 19 May 2020 12:26:47 -0000 Subject: [Python-checkins] Doc: Python 3.10 in sidebar and version switcher. (GH-20209) Message-ID: https://github.com/python/cpython/commit/19e3e0026417caa92ffe21a67157363b45da9aa2 commit: 19e3e0026417caa92ffe21a67157363b45da9aa2 branch: master author: Julien Palard committer: GitHub date: 2020-05-19T14:26:43+02:00 summary: Doc: Python 3.10 in sidebar and version switcher. 
(GH-20209) files: M Doc/tools/static/switchers.js M Doc/tools/templates/indexsidebar.html diff --git a/Doc/tools/static/switchers.js b/Doc/tools/static/switchers.js index e1ef91a8dfc68..c51f178ce645c 100644 --- a/Doc/tools/static/switchers.js +++ b/Doc/tools/static/switchers.js @@ -10,7 +10,8 @@ '(?:release/\\d.\\d[\\x\\d\\.]*)']; var all_versions = { - '3.9': 'dev (3.9)', + '3.10': 'dev (3.10)', + '3.9': 'pre (3.9)', '3.8': '3.8', '3.7': '3.7', '3.6': '3.6', diff --git a/Doc/tools/templates/indexsidebar.html b/Doc/tools/templates/indexsidebar.html index 4730a5fe5db7b..7a40be77aa129 100644 --- a/Doc/tools/templates/indexsidebar.html +++ b/Doc/tools/templates/indexsidebar.html @@ -2,7 +2,8 @@

{% trans %}Download{% endtrans %}

{% trans %}Docs by version{% endtrans %}

    -
  • {% trans %}Python 3.9 (in development){% endtrans %}
  • +
  • {% trans %}Python 3.10 (in development){% endtrans %}
  • +
  • {% trans %}Python 3.9 (pre-release){% endtrans %}
  • {% trans %}Python 3.8 (stable){% endtrans %}
  • {% trans %}Python 3.7 (stable){% endtrans %}
  • {% trans %}Python 3.6 (security-fixes){% endtrans %}
  • From webhook-mailer at python.org Tue May 19 08:32:35 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 19 May 2020 12:32:35 -0000 Subject: [Python-checkins] [3.7] Doc: Python 3.10 in sidebar and version switcher. (GH-20209) (GH-20223) Message-ID: https://github.com/python/cpython/commit/547d0bbf4a154f31fb0d6ad2c2d9bb5ae9b18b32 commit: 547d0bbf4a154f31fb0d6ad2c2d9bb5ae9b18b32 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-19T05:32:30-07:00 summary: [3.7] Doc: Python 3.10 in sidebar and version switcher. (GH-20209) (GH-20223) (cherry picked from commit 19e3e0026417caa92ffe21a67157363b45da9aa2) Co-authored-by: Julien Palard Automerge-Triggered-By: @JulienPalard files: M Doc/tools/static/switchers.js M Doc/tools/templates/indexsidebar.html diff --git a/Doc/tools/static/switchers.js b/Doc/tools/static/switchers.js index e1ef91a8dfc68..c51f178ce645c 100644 --- a/Doc/tools/static/switchers.js +++ b/Doc/tools/static/switchers.js @@ -10,7 +10,8 @@ '(?:release/\\d.\\d[\\x\\d\\.]*)']; var all_versions = { - '3.9': 'dev (3.9)', + '3.10': 'dev (3.10)', + '3.9': 'pre (3.9)', '3.8': '3.8', '3.7': '3.7', '3.6': '3.6', diff --git a/Doc/tools/templates/indexsidebar.html b/Doc/tools/templates/indexsidebar.html index 36f94ee03837e..c5924fd5774c5 100644 --- a/Doc/tools/templates/indexsidebar.html +++ b/Doc/tools/templates/indexsidebar.html @@ -2,7 +2,8 @@

    {% trans %}Download{% endtrans %}

    {% trans %}Download these documents{% endtrans %}

    {% trans %}Docs by version{% endtrans %}

      -
    • {% trans %}Python 3.9 (in development){% endtrans %}
    • +
    • {% trans %}Python 3.10 (in development){% endtrans %}
    • +
    • {% trans %}Python 3.9 (pre-release){% endtrans %}
    • {% trans %}Python 3.8 (stable){% endtrans %}
    • {% trans %}Python 3.7 (stable){% endtrans %}
    • {% trans %}Python 3.6 (security-fixes){% endtrans %}
    • From webhook-mailer at python.org Tue May 19 08:34:10 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 19 May 2020 12:34:10 -0000 Subject: [Python-checkins] Doc: Python 3.10 in sidebar and version switcher. (GH-20209) Message-ID: https://github.com/python/cpython/commit/b5279c1ab80ea74a88fec6eaa9ee9f156eb0112f commit: b5279c1ab80ea74a88fec6eaa9ee9f156eb0112f branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-19T05:34:01-07:00 summary: Doc: Python 3.10 in sidebar and version switcher. (GH-20209) (cherry picked from commit 19e3e0026417caa92ffe21a67157363b45da9aa2) Co-authored-by: Julien Palard files: M Doc/tools/static/switchers.js M Doc/tools/templates/indexsidebar.html diff --git a/Doc/tools/static/switchers.js b/Doc/tools/static/switchers.js index e1ef91a8dfc68..c51f178ce645c 100644 --- a/Doc/tools/static/switchers.js +++ b/Doc/tools/static/switchers.js @@ -10,7 +10,8 @@ '(?:release/\\d.\\d[\\x\\d\\.]*)']; var all_versions = { - '3.9': 'dev (3.9)', + '3.10': 'dev (3.10)', + '3.9': 'pre (3.9)', '3.8': '3.8', '3.7': '3.7', '3.6': '3.6', diff --git a/Doc/tools/templates/indexsidebar.html b/Doc/tools/templates/indexsidebar.html index 4730a5fe5db7b..7a40be77aa129 100644 --- a/Doc/tools/templates/indexsidebar.html +++ b/Doc/tools/templates/indexsidebar.html @@ -2,7 +2,8 @@

      {% trans %}Download{% endtrans %}

      {% trans %}Download these documents{% endtrans %}

      {% trans %}Docs by version{% endtrans %}

        -
      • {% trans %}Python 3.9 (in development){% endtrans %}
      • +
      • {% trans %}Python 3.10 (in development){% endtrans %}
      • +
      • {% trans %}Python 3.9 (pre-release){% endtrans %}
      • {% trans %}Python 3.8 (stable){% endtrans %}
      • {% trans %}Python 3.7 (stable){% endtrans %}
      • {% trans %}Python 3.6 (security-fixes){% endtrans %}
      • From webhook-mailer at python.org Tue May 19 08:35:03 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 19 May 2020 12:35:03 -0000 Subject: [Python-checkins] Doc: Python 3.10 in sidebar and version switcher. (GH-20209) (GH-20224) Message-ID: https://github.com/python/cpython/commit/f5c108959532898f855ed8f0d4f21ade3aa46393 commit: f5c108959532898f855ed8f0d4f21ade3aa46393 branch: 3.6 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-19T08:34:55-04:00 summary: Doc: Python 3.10 in sidebar and version switcher. (GH-20209) (GH-20224) (cherry picked from commit 19e3e0026417caa92ffe21a67157363b45da9aa2) Co-authored-by: Julien Palard Co-authored-by: Julien Palard files: M Doc/tools/static/switchers.js M Doc/tools/templates/indexsidebar.html diff --git a/Doc/tools/static/switchers.js b/Doc/tools/static/switchers.js index 075e4774938f4..9eb8ec8809aea 100644 --- a/Doc/tools/static/switchers.js +++ b/Doc/tools/static/switchers.js @@ -10,7 +10,8 @@ '(?:release/\\d.\\d[\\x\\d\\.]*)']; var all_versions = { - '3.9': 'dev (3.9)', + '3.10': 'dev (3.10)', + '3.9': 'pre (3.9)', '3.8': '3.8', '3.7': '3.7', '3.6': '3.6', diff --git a/Doc/tools/templates/indexsidebar.html b/Doc/tools/templates/indexsidebar.html index 36f94ee03837e..c5924fd5774c5 100644 --- a/Doc/tools/templates/indexsidebar.html +++ b/Doc/tools/templates/indexsidebar.html @@ -2,7 +2,8 @@

        {% trans %}Download{% endtrans %}

        {% trans %}Download these documents{% endtrans %}

        {% trans %}Docs by version{% endtrans %}

          -
        • {% trans %}Python 3.9 (in development){% endtrans %}
        • +
        • {% trans %}Python 3.10 (in development){% endtrans %}
        • +
        • {% trans %}Python 3.9 (pre-release){% endtrans %}
        • {% trans %}Python 3.8 (stable){% endtrans %}
        • {% trans %}Python 3.7 (stable){% endtrans %}
        • {% trans %}Python 3.6 (security-fixes){% endtrans %}
        • From webhook-mailer at python.org Tue May 19 08:39:14 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 19 May 2020 12:39:14 -0000 Subject: [Python-checkins] bpo-40677: Define IO_REPARSE_TAG_APPEXECLINK explicitly (GH-20206) Message-ID: https://github.com/python/cpython/commit/560d6436611900bc23d3dd1158acbe6fa39b2c9b commit: 560d6436611900bc23d3dd1158acbe6fa39b2c9b branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-19T05:39:06-07:00 summary: bpo-40677: Define IO_REPARSE_TAG_APPEXECLINK explicitly (GH-20206) This allows building with older versions of the Windows SDK where the value is not defined. (cherry picked from commit 711f9e180a48baba62301735b7f1a58ef0d0e93a) Co-authored-by: Minmin Gong files: A Misc/NEWS.d/next/Windows/2020-05-19-04-11-12.bpo-40677.qQbLW8.rst M Modules/_stat.c diff --git a/Misc/NEWS.d/next/Windows/2020-05-19-04-11-12.bpo-40677.qQbLW8.rst b/Misc/NEWS.d/next/Windows/2020-05-19-04-11-12.bpo-40677.qQbLW8.rst new file mode 100644 index 0000000000000..a09cb243aba31 --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2020-05-19-04-11-12.bpo-40677.qQbLW8.rst @@ -0,0 +1 @@ +Manually define IO_REPARSE_TAG_APPEXECLINK in case some old Windows SDK doesn't have it. 
\ No newline at end of file diff --git a/Modules/_stat.c b/Modules/_stat.c index 6a3020a00d114..7a799af0cada2 100644 --- a/Modules/_stat.c +++ b/Modules/_stat.c @@ -40,6 +40,10 @@ typedef unsigned short mode_t; # define FILE_ATTRIBUTE_NO_SCRUB_DATA 0x20000 #endif +#ifndef IO_REPARSE_TAG_APPEXECLINK +# define IO_REPARSE_TAG_APPEXECLINK 0x8000001BL +#endif + #endif /* MS_WINDOWS */ /* From Python's stat.py */ From webhook-mailer at python.org Tue May 19 08:41:16 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 19 May 2020 12:41:16 -0000 Subject: [Python-checkins] bpo-39631: Fix file association MIME type in the Windows installer (GH-20205) Message-ID: https://github.com/python/cpython/commit/076da79bc75b9aac8b7bc9685253fa7162c25698 commit: 076da79bc75b9aac8b7bc9685253fa7162c25698 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-19T05:41:07-07:00 summary: bpo-39631: Fix file association MIME type in the Windows installer (GH-20205) Use text/x-python instead of text/plain to avoid issues with tools assuming that "ShellExecute(script)" is a non-executable operation. 
(cherry picked from commit 8c862e51248c5ebfec787badec88eb58c9267e1e) Co-authored-by: Zackery Spytz files: M Tools/msi/launcher/launcher_reg.wxs diff --git a/Tools/msi/launcher/launcher_reg.wxs b/Tools/msi/launcher/launcher_reg.wxs index dace97ee58bb7..e8d9d24d43fa8 100644 --- a/Tools/msi/launcher/launcher_reg.wxs +++ b/Tools/msi/launcher/launcher_reg.wxs @@ -6,14 +6,14 @@ - + - + From webhook-mailer at python.org Tue May 19 08:43:00 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 19 May 2020 12:43:00 -0000 Subject: [Python-checkins] bpo-39631: Fix file association MIME type in the Windows installer (GH-20205) Message-ID: https://github.com/python/cpython/commit/46fc3ec056ff7ce65e9b96f14bd0b06aa1d0c62d commit: 46fc3ec056ff7ce65e9b96f14bd0b06aa1d0c62d branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-19T05:42:52-07:00 summary: bpo-39631: Fix file association MIME type in the Windows installer (GH-20205) Use text/x-python instead of text/plain to avoid issues with tools assuming that "ShellExecute(script)" is a non-executable operation. 
(cherry picked from commit 8c862e51248c5ebfec787badec88eb58c9267e1e) Co-authored-by: Zackery Spytz files: M Tools/msi/launcher/launcher_reg.wxs diff --git a/Tools/msi/launcher/launcher_reg.wxs b/Tools/msi/launcher/launcher_reg.wxs index dace97ee58bb7..e8d9d24d43fa8 100644 --- a/Tools/msi/launcher/launcher_reg.wxs +++ b/Tools/msi/launcher/launcher_reg.wxs @@ -6,14 +6,14 @@ - + - + From webhook-mailer at python.org Tue May 19 10:37:28 2020 From: webhook-mailer at python.org (Joe DeCapo) Date: Tue, 19 May 2020 14:37:28 -0000 Subject: [Python-checkins] Fix typo in multiprocessing documentation (GH-20016) Message-ID: https://github.com/python/cpython/commit/a355a06fcc7ef2232736dceb012ae623335cd7ab commit: a355a06fcc7ef2232736dceb012ae623335cd7ab branch: master author: Joe DeCapo <679017+JrGoodle at users.noreply.github.com> committer: GitHub date: 2020-05-19T20:07:09+05:30 summary: Fix typo in multiprocessing documentation (GH-20016) files: M Doc/library/multiprocessing.rst diff --git a/Doc/library/multiprocessing.rst b/Doc/library/multiprocessing.rst index 50b90031ab5a5..08258a65a89dc 100644 --- a/Doc/library/multiprocessing.rst +++ b/Doc/library/multiprocessing.rst @@ -2144,7 +2144,7 @@ with the :class:`Pool` class. or by calling :meth:`close` and :meth:`terminate` manually. Failure to do this can lead to the process hanging on finalization. - Note that is **not correct** to rely on the garbage colletor to destroy the pool + Note that it is **not correct** to rely on the garbage collector to destroy the pool as CPython does not assure that the finalizer of the pool will be called (see :meth:`object.__del__` for more information). 
From webhook-mailer at python.org Tue May 19 11:55:26 2020 From: webhook-mailer at python.org (Paul Ganssle) Date: Tue, 19 May 2020 15:55:26 -0000 Subject: [Python-checkins] bpo-40683: Add zoneinfo to LIBSUBDIRS (#20229) Message-ID: https://github.com/python/cpython/commit/2abededbc4165d2daa14ae9d74b1f33cce0593d7 commit: 2abededbc4165d2daa14ae9d74b1f33cce0593d7 branch: master author: Paul Ganssle committer: GitHub date: 2020-05-19T11:55:18-04:00 summary: bpo-40683: Add zoneinfo to LIBSUBDIRS (#20229) Without this, only the _zoneinfo module is getting installed, not the zoneinfo module. I believe this was not noticed earlier because test.test_zoneinfo was also not being installed. files: A Misc/NEWS.d/next/Build/2020-05-19-10-54-08.bpo-40683.W8JHrr.rst M Makefile.pre.in diff --git a/Makefile.pre.in b/Makefile.pre.in index de50f6b7f7022..5a18704e44198 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -1429,6 +1429,7 @@ LIBSUBDIRS= tkinter tkinter/test tkinter/test/test_tkinter \ test/test_importlib/source \ test/test_importlib/zipdata01 \ test/test_importlib/zipdata02 \ + test/test_zoneinfo test/test_zoneinfo/data \ test/ziptestdata \ asyncio \ test/test_asyncio \ @@ -1450,7 +1451,8 @@ LIBSUBDIRS= tkinter tkinter/test tkinter/test/test_tkinter \ multiprocessing multiprocessing/dummy \ unittest unittest/test unittest/test/testmock \ venv venv/scripts venv/scripts/common venv/scripts/posix \ - curses pydoc_data + curses pydoc_data \ + zoneinfo libinstall: build_all $(srcdir)/Modules/xxmodule.c @for i in $(SCRIPTDIR) $(LIBDEST); \ do \ diff --git a/Misc/NEWS.d/next/Build/2020-05-19-10-54-08.bpo-40683.W8JHrr.rst b/Misc/NEWS.d/next/Build/2020-05-19-10-54-08.bpo-40683.W8JHrr.rst new file mode 100644 index 0000000000000..d57e064c03d61 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2020-05-19-10-54-08.bpo-40683.W8JHrr.rst @@ -0,0 +1,2 @@ +Fixed an issue where the :mod:`zoneinfo` module and its tests were not +included when Python is installed with ``make``. 
From webhook-mailer at python.org Tue May 19 12:28:14 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Tue, 19 May 2020 16:28:14 -0000 Subject: [Python-checkins] bpo-37616: Handle version information more gracefully in getpath.c (GH-20214) Message-ID: https://github.com/python/cpython/commit/2a561b5f6830aee39cf05dc70c24e26c3558dda0 commit: 2a561b5f6830aee39cf05dc70c24e26c3558dda0 branch: master author: Pablo Galindo committer: GitHub date: 2020-05-19T17:28:05+01:00 summary: bpo-37616: Handle version information more gracefully in getpath.c (GH-20214) files: M Modules/getpath.c diff --git a/Modules/getpath.c b/Modules/getpath.c index 91cc449218c4a..d9829f8ad3dbd 100644 --- a/Modules/getpath.c +++ b/Modules/getpath.c @@ -1296,8 +1296,8 @@ calculate_zip_path(PyCalculatePath *calculate) { PyStatus res; - /* Path: / "python00.zip" */ - wchar_t *path = joinpath2(calculate->platlibdir_macro, L"python000.zip"); + /* Path: / "pythonXY.zip" */ + wchar_t *path = joinpath2(calculate->platlibdir_macro, L"python" Py_STRINGIFY(PY_MAJOR_VERSION) Py_STRINGIFY(PY_MINOR_VERSION) L".zip"); if (path == NULL) { return _PyStatus_NO_MEMORY(); } @@ -1305,7 +1305,7 @@ calculate_zip_path(PyCalculatePath *calculate) if (calculate->prefix_found > 0) { /* Use the reduced prefix returned by Py_GetPrefix() - Path: / / "python000.zip" */ + Path: / / "pythonXY.zip" */ wchar_t *parent = _PyMem_RawWcsdup(calculate->prefix); if (parent == NULL) { res = _PyStatus_NO_MEMORY(); @@ -1325,12 +1325,6 @@ calculate_zip_path(PyCalculatePath *calculate) goto done; } - /* Replace "000" with the version */ - size_t len = wcslen(calculate->zip_path); - calculate->zip_path[len - 7] = VERSION[0]; - calculate->zip_path[len - 6] = VERSION[2]; - calculate->zip_path[len - 5] = VERSION[3]; - res = _PyStatus_OK(); done: From webhook-mailer at python.org Tue May 19 13:01:41 2020 From: webhook-mailer at python.org (Hugo van Kemenade) Date: Tue, 19 May 2020 17:01:41 -0000 Subject: [Python-checkins] Enable GitHub 
Actions for 3.9 branch (GH-20231) Message-ID: https://github.com/python/cpython/commit/c105f7d8955ef9bf51125f6e44e8543e5f0618df commit: c105f7d8955ef9bf51125f6e44e8543e5f0618df branch: master author: Hugo van Kemenade committer: GitHub date: 2020-05-19T19:01:32+02:00 summary: Enable GitHub Actions for 3.9 branch (GH-20231) files: M .github/workflows/build.yml M .github/workflows/build_msi.yml M .github/workflows/coverage.yml M .github/workflows/doc.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 7c3bca3fc0671..6bb52cb6a5daa 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -7,11 +7,13 @@ on: push: branches: - master + - 3.9 - 3.8 - 3.7 pull_request: branches: - master + - 3.9 - 3.8 - 3.7 diff --git a/.github/workflows/build_msi.yml b/.github/workflows/build_msi.yml index e9ecf54727562..fb58e417cdd4f 100644 --- a/.github/workflows/build_msi.yml +++ b/.github/workflows/build_msi.yml @@ -4,6 +4,7 @@ on: push: branches: - master + - 3.9 - 3.8 - 3.7 paths: @@ -11,6 +12,7 @@ on: pull_request: branches: - master + - 3.9 - 3.8 - 3.7 paths: diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index 6dd973bf8e4ad..6fc15d3bffd5e 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -4,6 +4,7 @@ on: push: branches: - master + - 3.9 - 3.8 - 3.7 paths-ignore: @@ -12,6 +13,7 @@ on: #pull_request: # branches: # - master + # - 3.9 # - 3.8 # - 3.7 # paths-ignore: diff --git a/.github/workflows/doc.yml b/.github/workflows/doc.yml index c8d395cea5156..44107853ee7cf 100644 --- a/.github/workflows/doc.yml +++ b/.github/workflows/doc.yml @@ -4,6 +4,7 @@ on: #push: # branches: # - master + # - 3.9 # - 3.8 # - 3.7 # paths: @@ -11,6 +12,7 @@ on: pull_request: branches: - master + - 3.9 - 3.8 - 3.7 paths: From webhook-mailer at python.org Tue May 19 13:20:43 2020 From: webhook-mailer at python.org (Joannah Nanjekye) Date: Tue, 19 May 2020 17:20:43 -0000 Subject: 
[Python-checkins] bpo-32604: PEP 554 for use in test suite (GH-19985) Message-ID: https://github.com/python/cpython/commit/9d17cbf33df7cfb67ca0f37f6463ba5c18676641 commit: 9d17cbf33df7cfb67ca0f37f6463ba5c18676641 branch: master author: Joannah Nanjekye <33177550+nanjekyejoannah at users.noreply.github.com> committer: GitHub date: 2020-05-19T14:20:38-03:00 summary: bpo-32604: PEP 554 for use in test suite (GH-19985) * PEP 554 for use in test suite * ?? Added by blurb_it. * Fix space * Add doc to doc tree * Move to modules doc tree * Fix suspicious doc errors * Fix test__all * Docs docs docs * Support isolated and fix wait * Fix white space * Remove undefined from __all__ * Fix recv and add exceptions * Remove unused exceptions, fix pep 8 formatting errors and fix _NOT_SET in recv_nowait() Co-authored-by: nanjekyejoannah Co-authored-by: blurb-it[bot] <43283697+blurb-it[bot]@users.noreply.github.com> files: A Lib/test/support/interpreters.py A Lib/test/support/interpreters.rst A Lib/test/test_interpreters.py A Misc/NEWS.d/next/Library/2020-05-07-22-00-12.bpo-39881.E1xsNv.rst diff --git a/Lib/test/support/interpreters.py b/Lib/test/support/interpreters.py new file mode 100644 index 0000000000000..ef9dcafb2a386 --- /dev/null +++ b/Lib/test/support/interpreters.py @@ -0,0 +1,183 @@ +"""Subinterpreters High Level Module.""" + +import _xxsubinterpreters as _interpreters + +# aliases: +from _xxsubinterpreters import ( + ChannelError, ChannelNotFoundError, ChannelEmptyError, + is_shareable, +) + + +__all__ = [ + 'Interpreter', 'get_current', 'get_main', 'create', 'list_all', + 'SendChannel', 'RecvChannel', + 'create_channel', 'list_all_channels', 'is_shareable', + 'ChannelError', 'ChannelNotFoundError', + 'ChannelEmptyError', + ] + + +def create(*, isolated=True): + """ + Initialize a new (idle) Python interpreter. + """ + id = _interpreters.create(isolated=isolated) + return Interpreter(id, isolated=isolated) + + +def list_all(): + """ + Get all existing interpreters. 
+ """ + return [Interpreter(id) for id in + _interpreters.list_all()] + + +def get_current(): + """ + Get the currently running interpreter. + """ + id = _interpreters.get_current() + return Interpreter(id) + + +def get_main(): + """ + Get the main interpreter. + """ + id = _interpreters.get_main() + return Interpreter(id) + + +class Interpreter: + """ + The Interpreter object represents + a single interpreter. + """ + + def __init__(self, id, *, isolated=None): + self._id = id + self._isolated = isolated + + @property + def id(self): + return self._id + + @property + def isolated(self): + if self._isolated is None: + self._isolated = _interpreters.is_isolated(self._id) + return self._isolated + + def is_running(self): + """ + Return whether or not the identified + interpreter is running. + """ + return _interpreters.is_running(self._id) + + def close(self): + """ + Finalize and destroy the interpreter. + + Attempting to destroy the current + interpreter results in a RuntimeError. + """ + return _interpreters.destroy(self._id) + + def run(self, src_str, /, *, channels=None): + """ + Run the given source code in the interpreter. + This blocks the current Python thread until done. + """ + _interpreters.run_string(self._id, src_str) + + +def create_channel(): + """ + Create a new channel for passing data between + interpreters. + """ + + cid = _interpreters.channel_create() + return (RecvChannel(cid), SendChannel(cid)) + + +def list_all_channels(): + """ + Get all open channels. + """ + return [(RecvChannel(cid), SendChannel(cid)) + for cid in _interpreters.channel_list_all()] + + +_NOT_SET = object() + + +class RecvChannel: + """ + The RecvChannel object represents + a recieving channel. + """ + + def __init__(self, id): + self._id = id + + def recv(self, *, _delay=10 / 1000): # 10 milliseconds + """ + Get the next object from the channel, + and wait if none have been sent. + Associate the interpreter with the channel. 
+ """ + import time + sentinel = object() + obj = _interpreters.channel_recv(self._id, sentinel) + while obj is sentinel: + time.sleep(_delay) + obj = _interpreters.channel_recv(self._id, sentinel) + return obj + + def recv_nowait(self, default=_NOT_SET): + """ + Like recv(), but return the default + instead of waiting. + + This function is blocked by a missing low-level + implementation of channel_recv_wait(). + """ + if default is _NOT_SET: + return _interpreters.channel_recv(self._id) + else: + return _interpreters.channel_recv(self._id, default) + + +class SendChannel: + """ + The SendChannel object represents + a sending channel. + """ + + def __init__(self, id): + self._id = id + + def send(self, obj): + """ + Send the object (i.e. its data) to the receiving + end of the channel and wait. Associate the interpreter + with the channel. + """ + import time + _interpreters.channel_send(self._id, obj) + time.sleep(2) + + def send_nowait(self, obj): + """ + Like send(), but return False if not received. + + This function is blocked by a missing low-level + implementation of channel_send_wait(). + """ + + _interpreters.channel_send(self._id, obj) + return False diff --git a/Lib/test/support/interpreters.rst b/Lib/test/support/interpreters.rst new file mode 100644 index 0000000000000..9a05eb67520c8 --- /dev/null +++ b/Lib/test/support/interpreters.rst @@ -0,0 +1,145 @@ +High-level implementation of Subinterpreters +============================================ + +**Source code:** :source:`Lib/test/support/_interpreters.py` + +-------------- + +This module provides high-level tools for working with sub-interpreters, +such as creating them, running code in them, or sending data between them. +It is a wrapper around the low-level ``__xxsubinterpreters`` module. + +.. versionchanged:: added in 3.9 + +Interpreter Objects +------------------- + +The ``Interpreter`` object represents a single interpreter. + +.. 
class:: Interpreter(id) + + The class implementing a subinterpreter object. + + .. method:: is_running() + + Return ``True`` if the identified interpreter is running. + + .. method:: close() + + Destroy the interpreter. Attempting to destroy the current + interpreter results in a `RuntimeError`. + + .. method:: run(self, src_str, /, *, channels=None): + + Run the given source code in the interpreter. This blocks + the current thread until done. ``channels`` should be in + the form : `(RecvChannel, SendChannel)`. + +RecvChannel Objects +------------------- + +The ``RecvChannel`` object represents a recieving channel. + +.. class:: RecvChannel(id) + + This class represents the receiving end of a channel. + + .. method:: recv() + + Get the next object from the channel, and wait if + none have been sent. Associate the interpreter + with the channel. + + .. method:: recv_nowait(default=None) + + Like ``recv()``, but return the default result + instead of waiting. + + +SendChannel Objects +-------------------- + +The ``SendChannel`` object represents a sending channel. + +.. class:: SendChannel(id) + + This class represents the sending end of a channel. + + .. method:: send(obj) + + Send the object ``obj`` to the receiving end of the channel + and wait. Associate the interpreter with the channel. + + .. method:: send_nowait(obj) + + Similar to ``send()``, but returns ``False`` if + *obj* is not immediately received instead of blocking. + + +This module defines the following global functions: + + +.. function:: is_shareable(obj) + + Return ``True`` if the object's data can be shared between + interpreters. + +.. function:: create_channel() + + Create a new channel for passing data between interpreters. + +.. function:: list_all_channels() + + Return all open channels. + +.. function:: create(*, isolated=True) + + Initialize a new (idle) Python interpreter. Get the currently + running interpreter. This method returns an ``Interpreter`` object. + +.. 
function:: get_current() + + Get the currently running interpreter. This method returns + an ``Interpreter`` object. + +.. function:: get_main() + + Get the main interpreter. This method returns + an ``Interpreter`` object. + +.. function:: list_all() + + Get all existing interpreters. Returns a list + of ``Interpreter`` objects. + +This module also defines the following exceptions. + +.. exception:: RunFailedError + + This exception, a subclass of :exc:`RuntimeError`, is raised when the + ``Interpreter.run()`` results in an uncaught exception. + +.. exception:: ChannelError + + This exception is a subclass of :exc:`Exception`, and is the base + class for all channel-related exceptions. + +.. exception:: ChannelNotFoundError + + This exception is a subclass of :exc:`ChannelError`, and is raised + when the the identified channel is not found. + +.. exception:: ChannelEmptyError + + This exception is a subclass of :exc:`ChannelError`, and is raised when + the channel is unexpectedly empty. + +.. exception:: ChannelNotEmptyError + + This exception is a subclass of :exc:`ChannelError`, and is raised when + the channel is unexpectedly not empty. + +.. exception:: NotReceivedError + + This exception is a subclass of :exc:`ChannelError`, and is raised when + nothing was waiting to receive a sent object. 
diff --git a/Lib/test/test_interpreters.py b/Lib/test/test_interpreters.py new file mode 100644 index 0000000000000..3451a4c8759d8 --- /dev/null +++ b/Lib/test/test_interpreters.py @@ -0,0 +1,535 @@ +import contextlib +import os +import threading +from textwrap import dedent +import unittest +import time + +import _xxsubinterpreters as _interpreters +from test.support import interpreters + + +def _captured_script(script): + r, w = os.pipe() + indented = script.replace('\n', '\n ') + wrapped = dedent(f""" + import contextlib + with open({w}, 'w') as spipe: + with contextlib.redirect_stdout(spipe): + {indented} + """) + return wrapped, open(r) + + +def clean_up_interpreters(): + for interp in interpreters.list_all(): + if interp.id == 0: # main + continue + try: + interp.close() + except RuntimeError: + pass # already destroyed + + +def _run_output(interp, request, shared=None): + script, rpipe = _captured_script(request) + with rpipe: + interp.run(script) + return rpipe.read() + + + at contextlib.contextmanager +def _running(interp): + r, w = os.pipe() + def run(): + interp.run(dedent(f""" + # wait for "signal" + with open({r}) as rpipe: + rpipe.read() + """)) + + t = threading.Thread(target=run) + t.start() + + yield + + with open(w, 'w') as spipe: + spipe.write('done') + t.join() + + +class TestBase(unittest.TestCase): + + def tearDown(self): + clean_up_interpreters() + + +class CreateTests(TestBase): + + def test_in_main(self): + interp = interpreters.create() + lst = interpreters.list_all() + self.assertEqual(interp.id, lst[1].id) + + def test_in_thread(self): + lock = threading.Lock() + id = None + interp = interpreters.create() + lst = interpreters.list_all() + def f(): + nonlocal id + id = interp.id + lock.acquire() + lock.release() + + t = threading.Thread(target=f) + with lock: + t.start() + t.join() + self.assertEqual(interp.id, lst[1].id) + + def test_in_subinterpreter(self): + main, = interpreters.list_all() + interp = interpreters.create() + out = 
_run_output(interp, dedent(""" + from test.support import interpreters + interp = interpreters.create() + print(interp) + """)) + interp2 = out.strip() + + self.assertEqual(len(set(interpreters.list_all())), len({main, interp, interp2})) + + def test_after_destroy_all(self): + before = set(interpreters.list_all()) + # Create 3 subinterpreters. + interp_lst = [] + for _ in range(3): + interps = interpreters.create() + interp_lst.append(interps) + # Now destroy them. + for interp in interp_lst: + interp.close() + # Finally, create another. + interp = interpreters.create() + self.assertEqual(len(set(interpreters.list_all())), len(before | {interp})) + + def test_after_destroy_some(self): + before = set(interpreters.list_all()) + # Create 3 subinterpreters. + interp1 = interpreters.create() + interp2 = interpreters.create() + interp3 = interpreters.create() + # Now destroy 2 of them. + interp1.close() + interp2.close() + # Finally, create another. + interp = interpreters.create() + self.assertEqual(len(set(interpreters.list_all())), len(before | {interp3, interp})) + + +class GetCurrentTests(TestBase): + + def test_main(self): + main_interp_id = _interpreters.get_main() + cur_interp_id = interpreters.get_current().id + self.assertEqual(cur_interp_id, main_interp_id) + + def test_subinterpreter(self): + main = _interpreters.get_main() + interp = interpreters.create() + out = _run_output(interp, dedent(""" + from test.support import interpreters + cur = interpreters.get_current() + print(cur) + """)) + cur = out.strip() + self.assertNotEqual(cur, main) + + +class ListAllTests(TestBase): + + def test_initial(self): + interps = interpreters.list_all() + self.assertEqual(1, len(interps)) + + def test_after_creating(self): + main = interpreters.get_current() + first = interpreters.create() + second = interpreters.create() + + ids = [] + for interp in interpreters.list_all(): + ids.append(interp.id) + + self.assertEqual(ids, [main.id, first.id, second.id]) + + def 
test_after_destroying(self): + main = interpreters.get_current() + first = interpreters.create() + second = interpreters.create() + first.close() + + ids = [] + for interp in interpreters.list_all(): + ids.append(interp.id) + + self.assertEqual(ids, [main.id, second.id]) + + +class TestInterpreterId(TestBase): + + def test_in_main(self): + main = interpreters.get_current() + self.assertEqual(0, main.id) + + def test_with_custom_num(self): + interp = interpreters.Interpreter(1) + self.assertEqual(1, interp.id) + + def test_for_readonly_property(self): + interp = interpreters.Interpreter(1) + with self.assertRaises(AttributeError): + interp.id = 2 + + +class TestInterpreterIsRunning(TestBase): + + def test_main(self): + main = interpreters.get_current() + self.assertTrue(main.is_running()) + + def test_subinterpreter(self): + interp = interpreters.create() + self.assertFalse(interp.is_running()) + + with _running(interp): + self.assertTrue(interp.is_running()) + self.assertFalse(interp.is_running()) + + def test_from_subinterpreter(self): + interp = interpreters.create() + out = _run_output(interp, dedent(f""" + import _xxsubinterpreters as _interpreters + if _interpreters.is_running({interp.id}): + print(True) + else: + print(False) + """)) + self.assertEqual(out.strip(), 'True') + + def test_already_destroyed(self): + interp = interpreters.create() + interp.close() + with self.assertRaises(RuntimeError): + interp.is_running() + + +class TestInterpreterDestroy(TestBase): + + def test_basic(self): + interp1 = interpreters.create() + interp2 = interpreters.create() + interp3 = interpreters.create() + self.assertEqual(4, len(interpreters.list_all())) + interp2.close() + self.assertEqual(3, len(interpreters.list_all())) + + def test_all(self): + before = set(interpreters.list_all()) + interps = set() + for _ in range(3): + interp = interpreters.create() + interps.add(interp) + self.assertEqual(len(set(interpreters.list_all())), len(before | interps)) + for interp in 
interps: + interp.close() + self.assertEqual(len(set(interpreters.list_all())), len(before)) + + def test_main(self): + main, = interpreters.list_all() + with self.assertRaises(RuntimeError): + main.close() + + def f(): + with self.assertRaises(RuntimeError): + main.close() + + t = threading.Thread(target=f) + t.start() + t.join() + + def test_already_destroyed(self): + interp = interpreters.create() + interp.close() + with self.assertRaises(RuntimeError): + interp.close() + + def test_from_current(self): + main, = interpreters.list_all() + interp = interpreters.create() + script = dedent(f""" + from test.support import interpreters + try: + main = interpreters.get_current() + main.close() + except RuntimeError: + pass + """) + + interp.run(script) + self.assertEqual(len(set(interpreters.list_all())), len({main, interp})) + + def test_from_sibling(self): + main, = interpreters.list_all() + interp1 = interpreters.create() + script = dedent(f""" + from test.support import interpreters + interp2 = interpreters.create() + interp2.close() + """) + interp1.run(script) + + self.assertEqual(len(set(interpreters.list_all())), len({main, interp1})) + + def test_from_other_thread(self): + interp = interpreters.create() + def f(): + interp.close() + + t = threading.Thread(target=f) + t.start() + t.join() + + def test_still_running(self): + main, = interpreters.list_all() + interp = interpreters.create() + with _running(interp): + with self.assertRaises(RuntimeError): + interp.close() + self.assertTrue(interp.is_running()) + + +class TestInterpreterRun(TestBase): + + SCRIPT = dedent(""" + with open('{}', 'w') as out: + out.write('{}') + """) + FILENAME = 'spam' + + def setUp(self): + super().setUp() + self.interp = interpreters.create() + self._fs = None + + def tearDown(self): + if self._fs is not None: + self._fs.close() + super().tearDown() + + @property + def fs(self): + if self._fs is None: + self._fs = FSFixture(self) + return self._fs + + def test_success(self): + 
script, file = _captured_script('print("it worked!", end="")') + with file: + self.interp.run(script) + out = file.read() + + self.assertEqual(out, 'it worked!') + + def test_in_thread(self): + script, file = _captured_script('print("it worked!", end="")') + with file: + def f(): + self.interp.run(script) + + t = threading.Thread(target=f) + t.start() + t.join() + out = file.read() + + self.assertEqual(out, 'it worked!') + + @unittest.skipUnless(hasattr(os, 'fork'), "test needs os.fork()") + def test_fork(self): + import tempfile + with tempfile.NamedTemporaryFile('w+') as file: + file.write('') + file.flush() + + expected = 'spam spam spam spam spam' + script = dedent(f""" + import os + try: + os.fork() + except RuntimeError: + with open('{file.name}', 'w') as out: + out.write('{expected}') + """) + self.interp.run(script) + + file.seek(0) + content = file.read() + self.assertEqual(content, expected) + + def test_already_running(self): + with _running(self.interp): + with self.assertRaises(RuntimeError): + self.interp.run('print("spam")') + + def test_bad_script(self): + with self.assertRaises(TypeError): + self.interp.run(10) + + def test_bytes_for_script(self): + with self.assertRaises(TypeError): + self.interp.run(b'print("spam")') + + +class TestIsShareable(TestBase): + + def test_default_shareables(self): + shareables = [ + # singletons + None, + # builtin objects + b'spam', + 'spam', + 10, + -10, + ] + for obj in shareables: + with self.subTest(obj): + self.assertTrue( + interpreters.is_shareable(obj)) + + def test_not_shareable(self): + class Cheese: + def __init__(self, name): + self.name = name + def __str__(self): + return self.name + + class SubBytes(bytes): + """A subclass of a shareable type.""" + + not_shareables = [ + # singletons + True, + False, + NotImplemented, + ..., + # builtin types and objects + type, + object, + object(), + Exception(), + 100.0, + # user-defined types and objects + Cheese, + Cheese('Wensleydale'), + SubBytes(b'spam'), + ] + 
for obj in not_shareables: + with self.subTest(repr(obj)): + self.assertFalse( + interpreters.is_shareable(obj)) + + +class TestChannel(TestBase): + + def test_create_cid(self): + r, s = interpreters.create_channel() + self.assertIsInstance(r, interpreters.RecvChannel) + self.assertIsInstance(s, interpreters.SendChannel) + + def test_sequential_ids(self): + before = interpreters.list_all_channels() + channels1 = interpreters.create_channel() + channels2 = interpreters.create_channel() + channels3 = interpreters.create_channel() + after = interpreters.list_all_channels() + + self.assertEqual(len(set(after) - set(before)), + len({channels1, channels2, channels3})) + + +class TestSendRecv(TestBase): + + def test_send_recv_main(self): + r, s = interpreters.create_channel() + orig = b'spam' + s.send(orig) + obj = r.recv() + + self.assertEqual(obj, orig) + self.assertIsNot(obj, orig) + + def test_send_recv_same_interpreter(self): + interp = interpreters.create() + out = _run_output(interp, dedent(""" + from test.support import interpreters + r, s = interpreters.create_channel() + orig = b'spam' + s.send(orig) + obj = r.recv() + assert obj is not orig + assert obj == orig + """)) + + def test_send_recv_different_threads(self): + r, s = interpreters.create_channel() + + def f(): + while True: + try: + obj = r.recv() + break + except interpreters.ChannelEmptyError: + time.sleep(0.1) + s.send(obj) + t = threading.Thread(target=f) + t.start() + + s.send(b'spam') + t.join() + obj = r.recv() + + self.assertEqual(obj, b'spam') + + def test_send_recv_nowait_main(self): + r, s = interpreters.create_channel() + orig = b'spam' + s.send(orig) + obj = r.recv_nowait() + + self.assertEqual(obj, orig) + self.assertIsNot(obj, orig) + + def test_send_recv_nowait_same_interpreter(self): + interp = interpreters.create() + out = _run_output(interp, dedent(""" + from test.support import interpreters + r, s = interpreters.create_channel() + orig = b'spam' + s.send(orig) + obj = r.recv_nowait() 
+ assert obj is not orig + assert obj == orig + """)) + + r, s = interpreters.create_channel() + + def f(): + while True: + try: + obj = r.recv_nowait() + break + except _interpreters.ChannelEmptyError: + time.sleep(0.1) + s.send(obj) diff --git a/Misc/NEWS.d/next/Library/2020-05-07-22-00-12.bpo-39881.E1xsNv.rst b/Misc/NEWS.d/next/Library/2020-05-07-22-00-12.bpo-39881.E1xsNv.rst new file mode 100644 index 0000000000000..1129cd7649b96 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-07-22-00-12.bpo-39881.E1xsNv.rst @@ -0,0 +1,2 @@ +PEP 554 for use in the test suite. +(Patch By Joannah Nanjekye) \ No newline at end of file From webhook-mailer at python.org Tue May 19 18:10:11 2020 From: webhook-mailer at python.org (Steve Dower) Date: Tue, 19 May 2020 22:10:11 -0000 Subject: [Python-checkins] bpo-39631: Adds NEWS entry (GH-20227) Message-ID: https://github.com/python/cpython/commit/92327a9913150f5bb55b2727a2c5d50f9b7b6e55 commit: 92327a9913150f5bb55b2727a2c5d50f9b7b6e55 branch: master author: Steve Dower committer: GitHub date: 2020-05-19T23:10:03+01:00 summary: bpo-39631: Adds NEWS entry (GH-20227) files: A Misc/NEWS.d/next/Windows/2020-05-19-14-43-33.bpo-39631.Z5yXam.rst diff --git a/Misc/NEWS.d/next/Windows/2020-05-19-14-43-33.bpo-39631.Z5yXam.rst b/Misc/NEWS.d/next/Windows/2020-05-19-14-43-33.bpo-39631.Z5yXam.rst new file mode 100644 index 0000000000000..38db4b431b6af --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2020-05-19-14-43-33.bpo-39631.Z5yXam.rst @@ -0,0 +1,2 @@ +Changes the registered MIME type for ``.py`` files on Windows to +``text/x-python`` instead of ``text/plain``. 
From webhook-mailer at python.org Tue May 19 18:14:21 2020 From: webhook-mailer at python.org (Batuhan Taskaya) Date: Tue, 19 May 2020 22:14:21 -0000 Subject: [Python-checkins] bpo-38870: invalid escape sequence (GH-20240) Message-ID: https://github.com/python/cpython/commit/dd74b6fde31aff9aa46c4fc2a830c569764e1b63 commit: dd74b6fde31aff9aa46c4fc2a830c569764e1b63 branch: master author: Batuhan Taskaya committer: GitHub date: 2020-05-19T15:14:14-07:00 summary: bpo-38870: invalid escape sequence (GH-20240) `/home/isidentical/cpython/cpython/Lib/test/test_unparse.py:333: DeprecationWarning: invalid escape sequence \X` Automerge-Triggered-By: @pablogsal files: M Lib/test/test_unparse.py diff --git a/Lib/test/test_unparse.py b/Lib/test/test_unparse.py index bb725ced64db8..532aa3a639041 100644 --- a/Lib/test/test_unparse.py +++ b/Lib/test/test_unparse.py @@ -330,7 +330,7 @@ def test_docstrings(self): '\r\\r\t\\t\n\\n', '""">>> content = \"\"\"blabla\"\"\" <<<"""', r'foo\n\x00', - '??????^\X\BB\N{LONG RIGHTWARDS SQUIGGLE ARROW}' + '??????^\N{LONG RIGHTWARDS SQUIGGLE ARROW}' ) for docstring in docstrings: From webhook-mailer at python.org Tue May 19 18:17:15 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 19 May 2020 22:17:15 -0000 Subject: [Python-checkins] bpo-39631: Adds NEWS entry (GH-20227) Message-ID: https://github.com/python/cpython/commit/67bbb5d4381b6121a4f61ba945c58056e5894846 commit: 67bbb5d4381b6121a4f61ba945c58056e5894846 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-19T15:17:11-07:00 summary: bpo-39631: Adds NEWS entry (GH-20227) (cherry picked from commit 92327a9913150f5bb55b2727a2c5d50f9b7b6e55) Co-authored-by: Steve Dower files: A Misc/NEWS.d/next/Windows/2020-05-19-14-43-33.bpo-39631.Z5yXam.rst diff --git a/Misc/NEWS.d/next/Windows/2020-05-19-14-43-33.bpo-39631.Z5yXam.rst b/Misc/NEWS.d/next/Windows/2020-05-19-14-43-33.bpo-39631.Z5yXam.rst new file 
mode 100644 index 0000000000000..38db4b431b6af --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2020-05-19-14-43-33.bpo-39631.Z5yXam.rst @@ -0,0 +1,2 @@ +Changes the registered MIME type for ``.py`` files on Windows to +``text/x-python`` instead of ``text/plain``. From webhook-mailer at python.org Tue May 19 18:19:33 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 19 May 2020 22:19:33 -0000 Subject: [Python-checkins] bpo-39631: Adds NEWS entry (GH-20227) Message-ID: https://github.com/python/cpython/commit/381ceeaa5980643fa1f958f112f373d7a197e6e8 commit: 381ceeaa5980643fa1f958f112f373d7a197e6e8 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-19T15:19:26-07:00 summary: bpo-39631: Adds NEWS entry (GH-20227) (cherry picked from commit 92327a9913150f5bb55b2727a2c5d50f9b7b6e55) Co-authored-by: Steve Dower files: A Misc/NEWS.d/next/Windows/2020-05-19-14-43-33.bpo-39631.Z5yXam.rst diff --git a/Misc/NEWS.d/next/Windows/2020-05-19-14-43-33.bpo-39631.Z5yXam.rst b/Misc/NEWS.d/next/Windows/2020-05-19-14-43-33.bpo-39631.Z5yXam.rst new file mode 100644 index 0000000000000..38db4b431b6af --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2020-05-19-14-43-33.bpo-39631.Z5yXam.rst @@ -0,0 +1,2 @@ +Changes the registered MIME type for ``.py`` files on Windows to +``text/x-python`` instead of ``text/plain``. 
From webhook-mailer at python.org Tue May 19 18:35:59 2020 From: webhook-mailer at python.org (Christian Heimes) Date: Tue, 19 May 2020 22:35:59 -0000 Subject: [Python-checkins] bpo-40645: restrict HMAC key len to INT_MAX (GH-20238) Message-ID: https://github.com/python/cpython/commit/aca4670ad695d4b01c7880fe3d0af817421945bd commit: aca4670ad695d4b01c7880fe3d0af817421945bd branch: master author: Christian Heimes committer: GitHub date: 2020-05-19T15:35:51-07:00 summary: bpo-40645: restrict HMAC key len to INT_MAX (GH-20238) Signed-off-by: Christian Heimes Automerge-Triggered-By: @tiran files: M Modules/_hashopenssl.c diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c index 36ad6a65d72cf..674bddc090a6f 100644 --- a/Modules/_hashopenssl.c +++ b/Modules/_hashopenssl.c @@ -1403,6 +1403,12 @@ _hashlib_hmac_new_impl(PyObject *module, Py_buffer *key, PyObject *msg_obj, HMACobject *self = NULL; int r; + if (key->len > INT_MAX) { + PyErr_SetString(PyExc_OverflowError, + "key is too long."); + return NULL; + } + if ((digestmod == NULL) || !strlen(digestmod)) { PyErr_SetString( PyExc_TypeError, "Missing required parameter 'digestmod'."); @@ -1424,7 +1430,7 @@ _hashlib_hmac_new_impl(PyObject *module, Py_buffer *key, PyObject *msg_obj, r = HMAC_Init_ex( ctx, (const char*)key->buf, - key->len, + (int)key->len, digest, NULL /*impl*/); if (r == 0) { From webhook-mailer at python.org Tue May 19 19:57:23 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 19 May 2020 23:57:23 -0000 Subject: [Python-checkins] bpo-40521: Fix update_slot() when INTERN_NAME_STRINGS is not defined (#20246) Message-ID: https://github.com/python/cpython/commit/0509c4547fc95cc32a91ac446a26192c3bfdf157 commit: 0509c4547fc95cc32a91ac446a26192c3bfdf157 branch: master author: Victor Stinner committer: GitHub date: 2020-05-20T01:57:17+02:00 summary: bpo-40521: Fix update_slot() when INTERN_NAME_STRINGS is not defined (#20246) Fix type update_slot() function when the macro 
INTERN_NAME_STRINGS is not defined: use _PyUnicode_EQ() in this case. files: M Objects/typeobject.c diff --git a/Objects/typeobject.c b/Objects/typeobject.c index 243f8811b6257..0e055d677f139 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -7661,8 +7661,17 @@ update_slot(PyTypeObject *type, PyObject *name) assert(slotdefs_initialized); pp = ptrs; for (p = slotdefs; p->name; p++) { - if (p->name_strobj == name) + assert(PyUnicode_CheckExact(p->name_strobj)); + assert(PyUnicode_CheckExact(name)); +#ifdef INTERN_NAME_STRINGS + if (p->name_strobj == name) { + *pp++ = p; + } +#else + if (p->name_strobj == name || _PyUnicode_EQ(p->name_strobj, name)) { *pp++ = p; + } +#endif } *pp = NULL; for (pp = ptrs; *pp; pp++) { From webhook-mailer at python.org Tue May 19 22:31:56 2020 From: webhook-mailer at python.org (karl ding) Date: Wed, 20 May 2020 02:31:56 -0000 Subject: [Python-checkins] bpo-40291: Mention socket.CAN_J1939 in What's New (GH-20248) Message-ID: https://github.com/python/cpython/commit/550f30c8f33a2ba844db2ce3da8a897b3e882c9a commit: 550f30c8f33a2ba844db2ce3da8a897b3e882c9a branch: master author: karl ding committer: GitHub date: 2020-05-19T19:31:49-07:00 summary: bpo-40291: Mention socket.CAN_J1939 in What's New (GH-20248) This mentions the new CAN_J1939 implementation in the What's New documentation for Python 3.9 Automerge-Triggered-By: @gvanrossum files: M Doc/whatsnew/3.9.rst diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index 037e1055c79e5..86458a39df261 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -530,6 +530,9 @@ The :mod:`socket` module now exports the :data:`~socket.CAN_RAW_JOIN_FILTERS` constant on Linux 4.1 and greater. (Contributed by Stefan Tatschner and Zackery Spytz in :issue:`25780`.) +The socket module now supports the :data:`~socket.CAN_J1939` protocol on +platforms that support it. (Contributed by Karl Ding in :issue:`40291`.) 
+ time ---- From webhook-mailer at python.org Wed May 20 00:59:55 2020 From: webhook-mailer at python.org (Jonathan Goble) Date: Wed, 20 May 2020 04:59:55 -0000 Subject: [Python-checkins] Fix the URL to fishshell.com (GH-20251) Message-ID: https://github.com/python/cpython/commit/da7d1f04086598a29f77bd452beefe847d038344 commit: da7d1f04086598a29f77bd452beefe847d038344 branch: master author: Jonathan Goble committer: GitHub date: 2020-05-19T21:59:46-07:00 summary: Fix the URL to fishshell.com (GH-20251) files: M Lib/venv/scripts/posix/activate.fish diff --git a/Lib/venv/scripts/posix/activate.fish b/Lib/venv/scripts/posix/activate.fish index cb1ba1c301ede..54b9ea5676b66 100644 --- a/Lib/venv/scripts/posix/activate.fish +++ b/Lib/venv/scripts/posix/activate.fish @@ -1,5 +1,5 @@ # This file must be used with "source /bin/activate.fish" *from fish* -# (http://fishshell.org); you cannot run it directly. +# (https://fishshell.com/); you cannot run it directly. function deactivate -d "Exit virtual environment and return to normal shell environment" # reset old environment variables From webhook-mailer at python.org Wed May 20 05:41:41 2020 From: webhook-mailer at python.org (Ned Deily) Date: Wed, 20 May 2020 09:41:41 -0000 Subject: [Python-checkins] bpo-34956: edit and format better NEWS item in 3.9.0b1 changelog (GH-20255) Message-ID: https://github.com/python/cpython/commit/bac170cd93bbae939fcb29ccc6b5d423f7f4a089 commit: bac170cd93bbae939fcb29ccc6b5d423f7f4a089 branch: master author: Ned Deily committer: GitHub date: 2020-05-20T05:41:26-04:00 summary: bpo-34956: edit and format better NEWS item in 3.9.0b1 changelog (GH-20255) files: M Misc/NEWS.d/3.9.0b1.rst diff --git a/Misc/NEWS.d/3.9.0b1.rst b/Misc/NEWS.d/3.9.0b1.rst index 25dd405c13039..51dc9ce0ec037 100644 --- a/Misc/NEWS.d/3.9.0b1.rst +++ b/Misc/NEWS.d/3.9.0b1.rst @@ -800,12 +800,13 @@ it's not coming from Windows API. .. nonce: 35IcGF .. 
section: macOS -_tkinter now builds and links with non-system Tcl and Tk frameworks if they -are installed in /Library/Frameworks as had been the case on older releases -of macOS. If a macOS SDK is explicitly configured, by using ./configure ---enable-universalsdk= or -isysroot, only a Library/Frameworks directory in -the SDK itself is searched. The default behavior can still be overridden -with configure --with-tcltk-includes and --with-tcltk-libs. +When building Python on macOS from source, ``_tkinter`` now links with +non-system Tcl and Tk frameworks if they are installed in +``/Library/Frameworks``, as had been the case on older releases +of macOS. If a macOS SDK is explicitly configured, by using +``--enable-universalsdk=`` or ``-isysroot``, only the SDK itself is +searched. The default behavior can still be overridden with +``--with-tcltk-includes`` and ``--with-tcltk-libs``. .. From webhook-mailer at python.org Wed May 20 10:37:34 2020 From: webhook-mailer at python.org (Christian Heimes) Date: Wed, 20 May 2020 14:37:34 -0000 Subject: [Python-checkins] bpo-40698: Improve distutils upload hash digests (GH-20260) Message-ID: https://github.com/python/cpython/commit/e572c7f6dbe5397153803eab256e4a4ca3384f80 commit: e572c7f6dbe5397153803eab256e4a4ca3384f80 branch: master author: Christian Heimes committer: GitHub date: 2020-05-20T07:37:25-07:00 summary: bpo-40698: Improve distutils upload hash digests (GH-20260) - Fix upload test on systems that blocks MD5 - Add SHA2-256 and Blake2b-256 digests based on new Warehous and twine specs. 
Signed-off-by: Christian Heimes files: A Misc/NEWS.d/next/Library/2020-05-20-14-38-04.bpo-40698.zwl5Hc.rst M Doc/whatsnew/3.9.rst M Lib/distutils/command/upload.py M Lib/distutils/tests/test_upload.py diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index 86458a39df261..a483b19d151e2 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -331,6 +331,13 @@ and :meth:`~datetime.datetime.isocalendar()` of :class:`datetime.datetime` methods now returns a :func:`~collections.namedtuple` instead of a :class:`tuple`. (Contributed by Dong-hee Na in :issue:`24416`.) +distutils +--------- + +The :command:`upload` command now creates SHA2-256 and Blake2b-256 hash +digests. It skips MD5 on platforms that block MD5 digest. +(Contributed by Christian Heimes in :issue:`40698`.) + fcntl ----- diff --git a/Lib/distutils/command/upload.py b/Lib/distutils/command/upload.py index d822ba01338af..95e9fda186fc8 100644 --- a/Lib/distutils/command/upload.py +++ b/Lib/distutils/command/upload.py @@ -16,6 +16,16 @@ from distutils.spawn import spawn from distutils import log + +# PyPI Warehouse supports MD5, SHA256, and Blake2 (blake2-256) +# https://bugs.python.org/issue40698 +_FILE_CONTENT_DIGESTS = { + "md5_digest": getattr(hashlib, "md5", None), + "sha256_digest": getattr(hashlib, "sha256", None), + "blake2_256_digest": getattr(hashlib, "blake2b", None), +} + + class upload(PyPIRCCommand): description = "upload binary package to PyPI" @@ -87,6 +97,7 @@ def upload_file(self, command, pyversion, filename): content = f.read() finally: f.close() + meta = self.distribution.metadata data = { # action @@ -101,7 +112,6 @@ def upload_file(self, command, pyversion, filename): 'content': (os.path.basename(filename),content), 'filetype': command, 'pyversion': pyversion, - 'md5_digest': hashlib.md5(content).hexdigest(), # additional meta-data 'metadata_version': '1.0', @@ -123,6 +133,16 @@ def upload_file(self, command, pyversion, filename): data['comment'] = '' + # file content 
digests + for digest_name, digest_cons in _FILE_CONTENT_DIGESTS.items(): + if digest_cons is None: + continue + try: + data[digest_name] = digest_cons(content).hexdigest() + except ValueError: + # hash digest not available or blocked by security policy + pass + if self.sign: with open(filename + ".asc", "rb") as f: data['gpg_signature'] = (os.path.basename(filename) + ".asc", diff --git a/Lib/distutils/tests/test_upload.py b/Lib/distutils/tests/test_upload.py index c17d8e7d54e98..bca5516d2f74f 100644 --- a/Lib/distutils/tests/test_upload.py +++ b/Lib/distutils/tests/test_upload.py @@ -130,14 +130,30 @@ def test_upload(self): # what did we send ? headers = dict(self.last_open.req.headers) - self.assertEqual(headers['Content-length'], '2162') + self.assertGreaterEqual(int(headers['Content-length']), 2162) content_type = headers['Content-type'] self.assertTrue(content_type.startswith('multipart/form-data')) self.assertEqual(self.last_open.req.get_method(), 'POST') expected_url = 'https://upload.pypi.org/legacy/' self.assertEqual(self.last_open.req.get_full_url(), expected_url) - self.assertTrue(b'xxx' in self.last_open.req.data) - self.assertIn(b'protocol_version', self.last_open.req.data) + data = self.last_open.req.data + self.assertIn(b'xxx',data) + self.assertIn(b'protocol_version', data) + self.assertIn(b'sha256_digest', data) + self.assertIn( + b'cd2eb0837c9b4c962c22d2ff8b5441b7b45805887f051d39bf133b583baf' + b'6860', + data + ) + if b'md5_digest' in data: + self.assertIn(b'f561aaf6ef0bf14d4208bb46a4ccb3ad', data) + if b'blake2_256_digest' in data: + self.assertIn( + b'b6f289a27d4fe90da63c503bfe0a9b761a8f76bb86148565065f040be' + b'6d1c3044cf7ded78ef800509bccb4b648e507d88dc6383d67642aadcc' + b'ce443f1534330a', + data + ) # The PyPI response body was echoed results = self.get_logs(INFO) @@ -166,7 +182,7 @@ def test_upload_correct_cr(self): cmd.run() headers = dict(self.last_open.req.headers) - self.assertEqual(headers['Content-length'], '2172') + 
self.assertGreaterEqual(int(headers['Content-length']), 2172) self.assertIn(b'long description\r', self.last_open.req.data) def test_upload_fails(self): diff --git a/Misc/NEWS.d/next/Library/2020-05-20-14-38-04.bpo-40698.zwl5Hc.rst b/Misc/NEWS.d/next/Library/2020-05-20-14-38-04.bpo-40698.zwl5Hc.rst new file mode 100644 index 0000000000000..e57624819d54a --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-20-14-38-04.bpo-40698.zwl5Hc.rst @@ -0,0 +1,2 @@ +:mod:`distutils` upload creates SHA2-256 and Blake2b-256 digests. MD5 +digests is skipped if platform blocks MD5. From webhook-mailer at python.org Wed May 20 12:38:50 2020 From: webhook-mailer at python.org (Hugo van Kemenade) Date: Wed, 20 May 2020 16:38:50 -0000 Subject: [Python-checkins] Use v2 of GitHub Actions where available (GH-20232) Message-ID: https://github.com/python/cpython/commit/7fbe65dbc42985d002d9538fe9a9887e6e6af66c commit: 7fbe65dbc42985d002d9538fe9a9887e6e6af66c branch: master author: Hugo van Kemenade committer: GitHub date: 2020-05-20T09:38:41-07:00 summary: Use v2 of GitHub Actions where available (GH-20232) Automerge-Triggered-By: @Mariatta files: M .github/workflows/build_msi.yml M .github/workflows/coverage.yml M .github/workflows/doc.yml diff --git a/.github/workflows/build_msi.yml b/.github/workflows/build_msi.yml index fb58e417cdd4f..182eb7ce57167 100644 --- a/.github/workflows/build_msi.yml +++ b/.github/workflows/build_msi.yml @@ -23,7 +23,7 @@ jobs: name: 'Windows (x86) Installer' runs-on: windows-latest steps: - - uses: actions/checkout at v1 + - uses: actions/checkout at v2 - name: Build CPython installer run: .\Tools\msi\build.bat -x86 @@ -31,6 +31,6 @@ jobs: name: 'Windows (x64) Installer' runs-on: windows-latest steps: - - uses: actions/checkout at v1 + - uses: actions/checkout at v2 - name: Build CPython installer run: .\Tools\msi\build.bat -x64 diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index 6fc15d3bffd5e..4f46cbf1100e9 100644 --- 
a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -27,7 +27,7 @@ jobs: env: OPENSSL_VER: 1.1.1f steps: - - uses: actions/checkout at v1 + - uses: actions/checkout at v2 - name: Install Dependencies run: sudo ./.github/workflows/posix-deps-apt.sh - name: 'Restore OpenSSL build' @@ -76,7 +76,7 @@ jobs: name: 'Ubuntu (C Coverage)' runs-on: ubuntu-latest steps: - - uses: actions/checkout at v1 + - uses: actions/checkout at v2 - name: Install Dependencies run: sudo ./.github/workflows/posix-deps-apt.sh - name: Configure CPython diff --git a/.github/workflows/doc.yml b/.github/workflows/doc.yml index 44107853ee7cf..d481ea279d796 100644 --- a/.github/workflows/doc.yml +++ b/.github/workflows/doc.yml @@ -24,7 +24,7 @@ jobs: name: 'Docs' runs-on: ubuntu-latest steps: - - uses: actions/checkout at v1 + - uses: actions/checkout at v2 - name: 'Install Dependencies' run: sudo ./.github/workflows/posix-deps-apt.sh && sudo apt-get install wamerican - name: 'Configure CPython' From webhook-mailer at python.org Wed May 20 12:59:44 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Wed, 20 May 2020 16:59:44 -0000 Subject: [Python-checkins] Use v2 of GitHub Actions where available (GH-20232) Message-ID: https://github.com/python/cpython/commit/9ef49e1876f9d5d68cc74591de22e30c6a3ac7ad commit: 9ef49e1876f9d5d68cc74591de22e30c6a3ac7ad branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-20T09:59:40-07:00 summary: Use v2 of GitHub Actions where available (GH-20232) Automerge-Triggered-By: @Mariatta (cherry picked from commit 7fbe65dbc42985d002d9538fe9a9887e6e6af66c) Co-authored-by: Hugo van Kemenade files: M .github/workflows/build_msi.yml M .github/workflows/coverage.yml M .github/workflows/doc.yml diff --git a/.github/workflows/build_msi.yml b/.github/workflows/build_msi.yml index e9ecf54727562..769b3d012e940 100644 --- a/.github/workflows/build_msi.yml +++ 
b/.github/workflows/build_msi.yml @@ -21,7 +21,7 @@ jobs: name: 'Windows (x86) Installer' runs-on: windows-latest steps: - - uses: actions/checkout at v1 + - uses: actions/checkout at v2 - name: Build CPython installer run: .\Tools\msi\build.bat -x86 @@ -29,6 +29,6 @@ jobs: name: 'Windows (x64) Installer' runs-on: windows-latest steps: - - uses: actions/checkout at v1 + - uses: actions/checkout at v2 - name: Build CPython installer run: .\Tools\msi\build.bat -x64 diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index e58ad4a1dfd48..b5668f85dd7ab 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -25,7 +25,7 @@ jobs: env: OPENSSL_VER: 1.1.1f steps: - - uses: actions/checkout at v1 + - uses: actions/checkout at v2 - name: Install Dependencies run: sudo ./.github/workflows/posix-deps-apt.sh - name: 'Restore OpenSSL build' @@ -73,7 +73,7 @@ jobs: name: 'Ubuntu (C Coverage)' runs-on: ubuntu-latest steps: - - uses: actions/checkout at v1 + - uses: actions/checkout at v2 - name: Install Dependencies run: sudo ./.github/workflows/posix-deps-apt.sh - name: Configure CPython diff --git a/.github/workflows/doc.yml b/.github/workflows/doc.yml index c8d395cea5156..e7d99583e6909 100644 --- a/.github/workflows/doc.yml +++ b/.github/workflows/doc.yml @@ -22,7 +22,7 @@ jobs: name: 'Docs' runs-on: ubuntu-latest steps: - - uses: actions/checkout at v1 + - uses: actions/checkout at v2 - name: 'Install Dependencies' run: sudo ./.github/workflows/posix-deps-apt.sh && sudo apt-get install wamerican - name: 'Configure CPython' From webhook-mailer at python.org Wed May 20 13:02:18 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Wed, 20 May 2020 17:02:18 -0000 Subject: [Python-checkins] Use v2 of GitHub Actions where available (GH-20232) Message-ID: https://github.com/python/cpython/commit/f24550a245198a6dbe05fb398eaaa5477aeb6993 commit: f24550a245198a6dbe05fb398eaaa5477aeb6993 branch: 3.7 author: Miss 
Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-20T10:02:07-07:00 summary: Use v2 of GitHub Actions where available (GH-20232) Automerge-Triggered-By: @Mariatta (cherry picked from commit 7fbe65dbc42985d002d9538fe9a9887e6e6af66c) Co-authored-by: Hugo van Kemenade files: M .github/workflows/build_msi.yml M .github/workflows/coverage.yml M .github/workflows/doc.yml diff --git a/.github/workflows/build_msi.yml b/.github/workflows/build_msi.yml index e9ecf54727562..769b3d012e940 100644 --- a/.github/workflows/build_msi.yml +++ b/.github/workflows/build_msi.yml @@ -21,7 +21,7 @@ jobs: name: 'Windows (x86) Installer' runs-on: windows-latest steps: - - uses: actions/checkout at v1 + - uses: actions/checkout at v2 - name: Build CPython installer run: .\Tools\msi\build.bat -x86 @@ -29,6 +29,6 @@ jobs: name: 'Windows (x64) Installer' runs-on: windows-latest steps: - - uses: actions/checkout at v1 + - uses: actions/checkout at v2 - name: Build CPython installer run: .\Tools\msi\build.bat -x64 diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index e58ad4a1dfd48..b5668f85dd7ab 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -25,7 +25,7 @@ jobs: env: OPENSSL_VER: 1.1.1f steps: - - uses: actions/checkout at v1 + - uses: actions/checkout at v2 - name: Install Dependencies run: sudo ./.github/workflows/posix-deps-apt.sh - name: 'Restore OpenSSL build' @@ -73,7 +73,7 @@ jobs: name: 'Ubuntu (C Coverage)' runs-on: ubuntu-latest steps: - - uses: actions/checkout at v1 + - uses: actions/checkout at v2 - name: Install Dependencies run: sudo ./.github/workflows/posix-deps-apt.sh - name: Configure CPython diff --git a/.github/workflows/doc.yml b/.github/workflows/doc.yml index e943d32c24038..6dba417e93e64 100644 --- a/.github/workflows/doc.yml +++ b/.github/workflows/doc.yml @@ -22,7 +22,7 @@ jobs: name: 'Docs' runs-on: ubuntu-latest steps: - - uses: actions/checkout at v1 
+ - uses: actions/checkout at v2 - name: 'Install Dependencies' run: sudo ./.github/workflows/posix-deps-apt.sh && sudo apt-get install wamerican - name: 'Configure CPython' From webhook-mailer at python.org Wed May 20 13:12:45 2020 From: webhook-mailer at python.org (Kunal Bhalla) Date: Wed, 20 May 2020 17:12:45 -0000 Subject: [Python-checkins] s/wakup/wakeup (GH-20250) Message-ID: https://github.com/python/cpython/commit/f2947e354c95d246b1836ac78d4c820c420e259b commit: f2947e354c95d246b1836ac78d4c820c420e259b branch: master author: Kunal Bhalla committer: GitHub date: 2020-05-20T10:12:37-07:00 summary: s/wakup/wakeup (GH-20250) (as title) Automerge-Triggered-By: @Mariatta files: M Lib/asyncio/unix_events.py diff --git a/Lib/asyncio/unix_events.py b/Lib/asyncio/unix_events.py index 19d713545e4cd..f34a5b4b44373 100644 --- a/Lib/asyncio/unix_events.py +++ b/Lib/asyncio/unix_events.py @@ -101,7 +101,7 @@ def add_signal_handler(self, sig, callback, *args): try: # Register a dummy signal handler to ask Python to write the signal - # number in the wakup file descriptor. _process_self_data() will + # number in the wakeup file descriptor. _process_self_data() will # read signal numbers from this file descriptor to handle signals. 
signal.signal(sig, _sighandler_noop) From webhook-mailer at python.org Wed May 20 13:58:59 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Wed, 20 May 2020 17:58:59 -0000 Subject: [Python-checkins] s/wakup/wakeup (GH-20250) Message-ID: https://github.com/python/cpython/commit/a59fc9160db50a5e20358303e7ddbda180a1b74f commit: a59fc9160db50a5e20358303e7ddbda180a1b74f branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-20T10:58:55-07:00 summary: s/wakup/wakeup (GH-20250) (as title) Automerge-Triggered-By: @Mariatta (cherry picked from commit f2947e354c95d246b1836ac78d4c820c420e259b) Co-authored-by: Kunal Bhalla files: M Lib/asyncio/unix_events.py diff --git a/Lib/asyncio/unix_events.py b/Lib/asyncio/unix_events.py index 8c0a57482b7a4..1ff8c427da4a6 100644 --- a/Lib/asyncio/unix_events.py +++ b/Lib/asyncio/unix_events.py @@ -101,7 +101,7 @@ def add_signal_handler(self, sig, callback, *args): try: # Register a dummy signal handler to ask Python to write the signal - # number in the wakup file descriptor. _process_self_data() will + # number in the wakeup file descriptor. _process_self_data() will # read signal numbers from this file descriptor to handle signals. 
signal.signal(sig, _sighandler_noop) From webhook-mailer at python.org Wed May 20 13:59:04 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Wed, 20 May 2020 17:59:04 -0000 Subject: [Python-checkins] s/wakup/wakeup (GH-20250) Message-ID: https://github.com/python/cpython/commit/e91032b677dd30cc21d6612b3a73e34e5d434b98 commit: e91032b677dd30cc21d6612b3a73e34e5d434b98 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-20T10:59:00-07:00 summary: s/wakup/wakeup (GH-20250) (as title) Automerge-Triggered-By: @Mariatta (cherry picked from commit f2947e354c95d246b1836ac78d4c820c420e259b) Co-authored-by: Kunal Bhalla files: M Lib/asyncio/unix_events.py diff --git a/Lib/asyncio/unix_events.py b/Lib/asyncio/unix_events.py index 7034fc28d3e1e..e037e12965bd6 100644 --- a/Lib/asyncio/unix_events.py +++ b/Lib/asyncio/unix_events.py @@ -98,7 +98,7 @@ def add_signal_handler(self, sig, callback, *args): try: # Register a dummy signal handler to ask Python to write the signal - # number in the wakup file descriptor. _process_self_data() will + # number in the wakeup file descriptor. _process_self_data() will # read signal numbers from this file descriptor to handle signals. 
signal.signal(sig, _sighandler_noop) From webhook-mailer at python.org Wed May 20 21:43:59 2020 From: webhook-mailer at python.org (Raymond Hettinger) Date: Thu, 21 May 2020 01:43:59 -0000 Subject: [Python-checkins] Update whatsnew benchmark results for 3.9 (GH-20276) Message-ID: https://github.com/python/cpython/commit/df2e0ff0d63b07a1c8fdfd3674d99bd4f2fb69c5 commit: df2e0ff0d63b07a1c8fdfd3674d99bd4f2fb69c5 branch: master author: Raymond Hettinger committer: GitHub date: 2020-05-20T18:43:51-07:00 summary: Update whatsnew benchmark results for 3.9 (GH-20276) files: M Doc/whatsnew/3.9.rst diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index a483b19d151e2..72ea0c076e3e3 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -624,6 +624,61 @@ Optimizations (Contributed by Ed Maste, Conrad Meyer, Kyle Evans, Kubilay Kocak and Victor Stinner in :issue:`38061`.) +Here's a summary of performance improvements from Python 3.4 through Python 3.9: + +.. code-block:: none + + Python version 3.4 3.5 3.6 3.7 3.8 3.9 + -------------- --- --- --- --- --- --- + + Variable and attribute read access: + read_local 7.1 7.1 5.4 5.1 3.9 4.0 + read_nonlocal 7.1 8.1 5.8 5.4 4.4 4.8 + read_global 5.5 19.0 14.3 13.6 7.6 7.7 + read_builtin 1.1 21.6 18.5 19.0 7.5 7.7 + read_classvar_from_class 5.6 26.5 20.7 19.5 18.4 18.6 + read_classvar_from_instance 2.8 23.5 18.8 17.1 16.4 20.1 + read_instancevar 2.4 33.1 28.0 26.3 25.4 27.7 + read_instancevar_slots 7.8 31.3 20.8 20.8 20.2 24.5 + read_namedtuple 3.8 57.5 45.0 46.8 18.4 23.2 + read_boundmethod 7.6 37.9 29.6 26.9 27.7 45.9 + + Variable and attribute write access: + write_local 8.7 9.3 5.5 5.3 4.3 4.2 + write_nonlocal 0.5 11.1 5.6 5.5 4.7 4.9 + write_global 9.7 21.2 18.0 18.0 15.8 17.2 + write_classvar 2.9 96.0 104.6 102.1 39.2 43.2 + write_instancevar 4.6 45.8 40.0 38.9 35.5 40.7 + write_instancevar_slots 5.6 36.1 27.3 26.6 25.7 27.7 + + Data structure read access: + read_list 4.2 24.5 20.8 20.8 19.0 21.1 + read_deque 4.7 
25.5 20.2 20.6 19.8 21.6 + read_dict 4.3 25.7 22.3 23.0 21.0 22.5 + read_strdict 2.6 24.3 19.5 21.2 18.9 21.6 + + Data structure write access: + write_list 7.1 28.5 22.5 21.6 20.0 21.6 + write_deque 8.7 30.1 22.7 21.8 23.5 23.2 + write_dict 1.4 33.3 29.3 29.2 24.7 27.8 + write_strdict 8.4 29.9 27.5 25.2 23.1 29.8 + + Stack (or queue) operations: + list_append_pop 13.4 112.7 75.4 74.2 50.8 53.9 + deque_append_pop 3.5 57.0 49.4 49.2 42.5 45.5 + deque_append_popleft 3.7 57.3 49.7 49.7 42.8 45.5 + + Timing loop: + loop_overhead 0.5 0.6 0.4 0.3 0.3 0.3 + +These results were generated from the variable access benchmark script at: +``Tools/scripts/var_access_benchmark.py``. The benchmark script displays timings +in nanoseconds. The benchmarks were measured on an +`Intel? Core? i7-4960HQ processor +`_ +running the macOS 64-bit builds found at +`python.org `_. + Deprecated ========== From webhook-mailer at python.org Thu May 21 00:21:10 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Thu, 21 May 2020 04:21:10 -0000 Subject: [Python-checkins] bpo-40651: Improve LRU recipe in the OrderedDict documentation (GH-GH-20139) (GH-20167) Message-ID: https://github.com/python/cpython/commit/d88f0aa8e24ea7562f2e04833f46d8526e846334 commit: d88f0aa8e24ea7562f2e04833f46d8526e846334 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-20T21:21:02-07:00 summary: bpo-40651: Improve LRU recipe in the OrderedDict documentation (GH-GH-20139) (GH-20167) files: M Doc/library/collections.rst diff --git a/Doc/library/collections.rst b/Doc/library/collections.rst index 8dcf9451d72bf..d4297166597b6 100644 --- a/Doc/library/collections.rst +++ b/Doc/library/collections.rst @@ -1150,6 +1150,8 @@ variants of :func:`functools.lru_cache`:: return value def __setitem__(self, key, value): + if key in self: + self.move_to_end(key) super().__setitem__(key, value) if len(self) > self.maxsize: oldest = 
next(iter(self)) From webhook-mailer at python.org Thu May 21 01:20:50 2020 From: webhook-mailer at python.org (Kyle Stanley) Date: Thu, 21 May 2020 05:20:50 -0000 Subject: [Python-checkins] bpo-32309: Add support for contextvars in asyncio.to_thread() (GH-20278) Message-ID: https://github.com/python/cpython/commit/0f56263e62ba91d0baae40fb98947a3a98034a73 commit: 0f56263e62ba91d0baae40fb98947a3a98034a73 branch: master author: Kyle Stanley committer: GitHub date: 2020-05-20T22:20:43-07:00 summary: bpo-32309: Add support for contextvars in asyncio.to_thread() (GH-20278) Allows contextvars from the main thread to be accessed in the separate thread used in `asyncio.to_thread()`. See the [discussion](https://github.com/python/cpython/pull/20143#discussion_r427808225) in GH-20143 for context. Automerge-Triggered-By: @aeros files: M Doc/library/asyncio-task.rst M Lib/asyncio/threads.py M Lib/test/test_asyncio/test_threads.py diff --git a/Doc/library/asyncio-task.rst b/Doc/library/asyncio-task.rst index 7c2704090551b..dd94c14854835 100644 --- a/Doc/library/asyncio-task.rst +++ b/Doc/library/asyncio-task.rst @@ -610,7 +610,9 @@ Running in Threads Asynchronously run function *func* in a separate thread. Any \*args and \*\*kwargs supplied for this function are directly passed - to *func*. + to *func*. Also, the current :class:`contextvars.Context` is propogated, + allowing context variables from the event loop thread to be accessed in the + separate thread. Return an :class:`asyncio.Future` which represents the eventual result of *func*. @@ -657,6 +659,8 @@ Running in Threads that release the GIL or alternative Python implementations that don't have one, `asyncio.to_thread()` can also be used for CPU-bound functions. + .. 
versionadded:: 3.9 + Scheduling From Other Threads ============================= diff --git a/Lib/asyncio/threads.py b/Lib/asyncio/threads.py index 2f40467fe5bc7..51e0ba95d822e 100644 --- a/Lib/asyncio/threads.py +++ b/Lib/asyncio/threads.py @@ -1,6 +1,7 @@ """High-level support for working with threads in asyncio""" import functools +import contextvars from . import events @@ -12,10 +13,13 @@ async def to_thread(func, /, *args, **kwargs): """Asynchronously run function *func* in a separate thread. Any *args and **kwargs supplied for this function are directly passed - to *func*. + to *func*. Also, the current :class:`contextvars.Context` is propogated, + allowing context variables from the main thread to be accessed in the + separate thread. Return an asyncio.Future which represents the eventual result of *func*. """ loop = events.get_running_loop() - func_call = functools.partial(func, *args, **kwargs) + ctx = contextvars.copy_context() + func_call = functools.partial(ctx.run, func, *args, **kwargs) return await loop.run_in_executor(None, func_call) diff --git a/Lib/test/test_asyncio/test_threads.py b/Lib/test/test_asyncio/test_threads.py index 99a00f21832f3..2af322421dacf 100644 --- a/Lib/test/test_asyncio/test_threads.py +++ b/Lib/test/test_asyncio/test_threads.py @@ -3,6 +3,7 @@ import asyncio import unittest +from contextvars import ContextVar from unittest import mock from test.test_asyncio import utils as test_utils @@ -74,6 +75,19 @@ async def main(): self.loop.run_until_complete(main()) func.assert_called_once_with('test', something=True) + def test_to_thread_contextvars(self): + test_ctx = ContextVar('test_ctx') + + def get_ctx(): + return test_ctx.get() + + async def main(): + test_ctx.set('parrot') + return await asyncio.to_thread(get_ctx) + + result = self.loop.run_until_complete(main()) + self.assertEqual(result, 'parrot') + if __name__ == "__main__": unittest.main() From webhook-mailer at python.org Thu May 21 04:37:53 2020 From: webhook-mailer at 
python.org (Raymond Hettinger) Date: Thu, 21 May 2020 08:37:53 -0000 Subject: [Python-checkins] Improve output summary in the examples and recipes section (GH-20285) Message-ID: https://github.com/python/cpython/commit/e16d2f7c37b2689dd3cb1ac36bcf875926b5f1f6 commit: e16d2f7c37b2689dd3cb1ac36bcf875926b5f1f6 branch: master author: Raymond Hettinger committer: GitHub date: 2020-05-21T01:37:38-07:00 summary: Improve output summary in the examples and recipes section (GH-20285) files: M Doc/library/random.rst diff --git a/Doc/library/random.rst b/Doc/library/random.rst index 90366f499cae6..0cdf0a6ac4a47 100644 --- a/Doc/library/random.rst +++ b/Doc/library/random.rst @@ -494,7 +494,7 @@ Simulation of arrival times and service deliveries for a multiserver queue:: from heapq import heappush, heappop from random import expovariate, gauss - from statistics import mean, median, stdev + from statistics import mean, quantiles average_arrival_interval = 5.6 average_service_time = 15.0 @@ -513,8 +513,8 @@ Simulation of arrival times and service deliveries for a multiserver queue:: service_completed = arrival_time + wait + service_duration heappush(servers, service_completed) - print(f'Mean wait: {mean(waits):.1f}. Stdev wait: {stdev(waits):.1f}.') - print(f'Median wait: {median(waits):.1f}. Max wait: {max(waits):.1f}.') + print(f'Mean wait: {mean(waits):.1f} Max wait: {max(waits):.1f}') + print('Quartiles:', [round(q, 1) for q in quantiles(waits)]) .. 
seealso:: From webhook-mailer at python.org Thu May 21 12:56:20 2020 From: webhook-mailer at python.org (Dong-hee Na) Date: Thu, 21 May 2020 16:56:20 -0000 Subject: [Python-checkins] bpo-40714: Remove compile warning from _zoneinfo.c (GH-20291) Message-ID: https://github.com/python/cpython/commit/a487a39dca4c41305928c7dfdbcb0b3aa344683b commit: a487a39dca4c41305928c7dfdbcb0b3aa344683b branch: master author: Dong-hee Na committer: GitHub date: 2020-05-22T01:56:03+09:00 summary: bpo-40714: Remove compile warning from _zoneinfo.c (GH-20291) files: M Modules/_zoneinfo.c diff --git a/Modules/_zoneinfo.c b/Modules/_zoneinfo.c index 9f5e64d8486cc..d7e7157657643 100644 --- a/Modules/_zoneinfo.c +++ b/Modules/_zoneinfo.c @@ -1457,7 +1457,9 @@ parse_tz_str(PyObject *tz_str_obj, _tzrule *out) PyObject *dst_abbr = NULL; TransitionRuleType *start = NULL; TransitionRuleType *end = NULL; - long std_offset, dst_offset; + // Initialize offsets to invalid value (> 24 hours) + long std_offset = 1 << 20; + long dst_offset = 1 << 20; char *tz_str = PyBytes_AsString(tz_str_obj); if (tz_str == NULL) { @@ -1907,7 +1909,7 @@ build_tzrule(PyObject *std_abbr, PyObject *dst_abbr, long std_offset, long dst_offset, TransitionRuleType *start, TransitionRuleType *end, _tzrule *out) { - _tzrule rv = {0}; + _tzrule rv = {{0}}; rv.start = start; rv.end = end; From webhook-mailer at python.org Thu May 21 15:57:57 2020 From: webhook-mailer at python.org (Batuhan Taskaya) Date: Thu, 21 May 2020 19:57:57 -0000 Subject: [Python-checkins] bpo-40334: Correctly generate C parser when assigned var is None (GH-20296) Message-ID: https://github.com/python/cpython/commit/f50516e6a978ee694232512399dd1ab47aaebab1 commit: f50516e6a978ee694232512399dd1ab47aaebab1 branch: master author: Batuhan Taskaya committer: GitHub date: 2020-05-21T20:57:52+01:00 summary: bpo-40334: Correctly generate C parser when assigned var is None (GH-20296) When there are 2 negative lookaheads in the same rule, let's say `!"(" blabla "," 
!")"`, there will the 2 `FunctionCall`'s where assigned value is None. Currently when the `add_var` is called the first one will be ignored but when the second lookahead's var is sent to dedupe it will be returned as `None_1` and this won't be ignored by the declaration generator in the `visit_Alt`. This patch adds an explicit check to `add_var` to distinguish whether if there is a variable or not. files: M Tools/peg_generator/pegen/c_generator.py diff --git a/Tools/peg_generator/pegen/c_generator.py b/Tools/peg_generator/pegen/c_generator.py index c93b348e2b44c..e8107ec044d0a 100644 --- a/Tools/peg_generator/pegen/c_generator.py +++ b/Tools/peg_generator/pegen/c_generator.py @@ -722,4 +722,7 @@ def collect_vars(self, node: Alt) -> Dict[Optional[str], Optional[str]]: def add_var(self, node: NamedItem) -> Tuple[Optional[str], Optional[str]]: call = self.callmakervisitor.visit(node.item) - return self.dedupe(node.name if node.name else call.assigned_variable), call.return_type + name = node.name if node.name else call.assigned_variable + if name is not None: + name = self.dedupe(name) + return name, call.return_type From webhook-mailer at python.org Thu May 21 16:26:35 2020 From: webhook-mailer at python.org (Mathieu Dupuy) Date: Thu, 21 May 2020 20:26:35 -0000 Subject: [Python-checkins] Reword aware/naive introduction sentence (GH-20175) Message-ID: https://github.com/python/cpython/commit/2e76820a50b8ce2a9a5f6cdef6cef1859a89c460 commit: 2e76820a50b8ce2a9a5f6cdef6cef1859a89c460 branch: master author: Mathieu Dupuy committer: GitHub date: 2020-05-21T16:26:27-04:00 summary: Reword aware/naive introduction sentence (GH-20175) This is more informative and avoids the question of whether the period should go inside or outside the quotation marks. See also GH-20007. 
files: M Doc/library/datetime.rst diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst index 800361c54ba71..508bc88e7f4b8 100644 --- a/Doc/library/datetime.rst +++ b/Doc/library/datetime.rst @@ -35,7 +35,8 @@ on efficient attribute extraction for output formatting and manipulation. Aware and Naive Objects ----------------------- -Date and time objects may be categorized as "aware" or "naive". +Date and time objects may be categorized as "aware" or "naive" depending on +whether or not they include timezone information. With sufficient knowledge of applicable algorithmic and political time adjustments, such as time zone and daylight saving time information, From webhook-mailer at python.org Thu May 21 16:39:52 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Thu, 21 May 2020 20:39:52 -0000 Subject: [Python-checkins] Fix typing problems reported by mypy in pegen (GH-20297) Message-ID: https://github.com/python/cpython/commit/d10fef35c6ce8a3193b974be7e8c8304b1146153 commit: d10fef35c6ce8a3193b974be7e8c8304b1146153 branch: master author: Pablo Galindo committer: GitHub date: 2020-05-21T21:39:44+01:00 summary: Fix typing problems reported by mypy in pegen (GH-20297) files: M Tools/peg_generator/pegen/c_generator.py M Tools/peg_generator/pegen/parser_generator.py diff --git a/Tools/peg_generator/pegen/c_generator.py b/Tools/peg_generator/pegen/c_generator.py index e8107ec044d0a..586f28cc46975 100644 --- a/Tools/peg_generator/pegen/c_generator.py +++ b/Tools/peg_generator/pegen/c_generator.py @@ -1,5 +1,5 @@ import ast -from dataclasses import dataclass +from dataclasses import field, dataclass import re from typing import Any, Dict, IO, Optional, List, Text, Tuple, Set from enum import Enum @@ -25,6 +25,7 @@ ) from pegen.parser_generator import ParserGenerator + EXTENSION_PREFIX = """\ #include "pegen.h" @@ -63,7 +64,7 @@ class NodeTypes(Enum): @dataclass class FunctionCall: function: str - arguments: Optional[List[Any]] = None + arguments: 
List[Any] = field(default_factory=list) assigned_variable: Optional[str] = None return_type: Optional[str] = None nodetype: Optional[NodeTypes] = None @@ -94,7 +95,7 @@ def __init__( self.gen = parser_generator self.exact_tokens = exact_tokens self.non_exact_tokens = non_exact_tokens - self.cache: Dict[Any, Any] = {} + self.cache: Dict[Any, FunctionCall] = {} self.keyword_cache: Dict[str, int] = {} def keyword_helper(self, keyword: str) -> FunctionCall: @@ -171,7 +172,7 @@ def can_we_inline(node: Rhs) -> int: if node in self.cache: return self.cache[node] if can_we_inline(node): - self.cache[node] = self.visit(node.alts[0].items[0]) + self.cache[node] = self.generate_call(node.alts[0].items[0]) else: name = self.gen.name_node(node) self.cache[node] = FunctionCall( @@ -183,13 +184,13 @@ def can_we_inline(node: Rhs) -> int: return self.cache[node] def visit_NamedItem(self, node: NamedItem) -> FunctionCall: - call = self.visit(node.item) + call = self.generate_call(node.item) if node.name: call.assigned_variable = node.name return call def lookahead_call_helper(self, node: Lookahead, positive: int) -> FunctionCall: - call = self.visit(node.node) + call = self.generate_call(node.node) if call.nodetype == NodeTypes.NAME_TOKEN: return FunctionCall( function=f"_PyPegen_lookahead_with_name", @@ -217,7 +218,7 @@ def visit_NegativeLookahead(self, node: NegativeLookahead) -> FunctionCall: return self.lookahead_call_helper(node, 0) def visit_Opt(self, node: Opt) -> FunctionCall: - call = self.visit(node.node) + call = self.generate_call(node.node) return FunctionCall( assigned_variable="_opt_var", function=call.function, @@ -266,7 +267,7 @@ def visit_Gather(self, node: Gather) -> FunctionCall: return self.cache[node] def visit_Group(self, node: Group) -> FunctionCall: - return self.visit(node.rhs) + return self.generate_call(node.rhs) def visit_Cut(self, node: Cut) -> FunctionCall: return FunctionCall( @@ -276,6 +277,9 @@ def visit_Cut(self, node: Cut) -> FunctionCall: 
nodetype=NodeTypes.CUT_OPERATOR, ) + def generate_call(self, node: Any) -> FunctionCall: + return super().visit(node) + class CParserGenerator(ParserGenerator, GrammarVisitor): def __init__( @@ -317,17 +321,13 @@ def call_with_errorcheck_goto(self, call_text: str, goto_target: str) -> None: self.print(f"goto {goto_target};") self.print(f"}}") - def out_of_memory_return( - self, - expr: str, - cleanup_code: Optional[str] = None, - ) -> None: + def out_of_memory_return(self, expr: str, cleanup_code: Optional[str] = None,) -> None: self.print(f"if ({expr}) {{") with self.indent(): if cleanup_code is not None: self.print(cleanup_code) self.print("p->error_indicator = 1;") - self.print("PyErr_NoMemory();"); + self.print("PyErr_NoMemory();") self.print("return NULL;") self.print(f"}}") @@ -484,10 +484,7 @@ def _handle_default_rule_body(self, node: Rule, rhs: Rhs, result_type: str) -> N if any(alt.action and "EXTRA" in alt.action for alt in rhs.alts): self._set_up_token_start_metadata_extraction() self.visit( - rhs, - is_loop=False, - is_gather=node.is_gather(), - rulename=node.name, + rhs, is_loop=False, is_gather=node.is_gather(), rulename=node.name, ) if self.debug: self.print('fprintf(stderr, "Fail at %d: {node.name}\\n", p->mark);') @@ -518,10 +515,7 @@ def _handle_loop_rule_body(self, node: Rule, rhs: Rhs) -> None: if any(alt.action and "EXTRA" in alt.action for alt in rhs.alts): self._set_up_token_start_metadata_extraction() self.visit( - rhs, - is_loop=True, - is_gather=node.is_gather(), - rulename=node.name, + rhs, is_loop=True, is_gather=node.is_gather(), rulename=node.name, ) if is_repeat1: self.print("if (_n == 0 || p->error_indicator) {") @@ -567,7 +561,7 @@ def visit_Rule(self, node: Rule) -> None: self.print("}") def visit_NamedItem(self, node: NamedItem) -> None: - call = self.callmakervisitor.visit(node) + call = self.callmakervisitor.generate_call(node) if call.assigned_variable: call.assigned_variable = self.dedupe(call.assigned_variable) 
self.print(call) @@ -674,7 +668,9 @@ def handle_alt_loop(self, node: Alt, is_gather: bool, rulename: Optional[str]) - self.print("if (_n == _children_capacity) {") with self.indent(): self.print("_children_capacity *= 2;") - self.print("void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));") + self.print( + "void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));" + ) self.out_of_memory_return(f"!_new_children") self.print("_children = _new_children;") self.print("}") @@ -721,7 +717,7 @@ def collect_vars(self, node: Alt) -> Dict[Optional[str], Optional[str]]: return types def add_var(self, node: NamedItem) -> Tuple[Optional[str], Optional[str]]: - call = self.callmakervisitor.visit(node.item) + call = self.callmakervisitor.generate_call(node.item) name = node.name if node.name else call.assigned_variable if name is not None: name = self.dedupe(name) diff --git a/Tools/peg_generator/pegen/parser_generator.py b/Tools/peg_generator/pegen/parser_generator.py index 689022b12da20..364eccb84f479 100644 --- a/Tools/peg_generator/pegen/parser_generator.py +++ b/Tools/peg_generator/pegen/parser_generator.py @@ -27,7 +27,7 @@ def visit_NameLeaf(self, node: NameLeaf) -> None: # TODO: Add line/col info to (leaf) nodes raise GrammarError(f"Dangling reference to rule {node.value!r}") - def visit_NamedItem(self, node: NameLeaf) -> None: + def visit_NamedItem(self, node: NamedItem) -> None: if node.name and node.name.startswith("_"): raise GrammarError(f"Variable names cannot start with underscore: '{node.name}'") self.visit(node.item) @@ -57,7 +57,7 @@ def __init__(self, grammar: Grammar, tokens: Dict[int, str], file: Optional[IO[T self.all_rules: Dict[str, Rule] = {} # Rules + temporal rules self._local_variable_stack: List[List[str]] = [] - def validate_rule_names(self): + def validate_rule_names(self) -> None: for rule in self.rules: if rule.startswith("_"): raise GrammarError(f"Rule names cannot start with underscore: 
'{rule}'") From webhook-mailer at python.org Thu May 21 16:42:02 2020 From: webhook-mailer at python.org (Batuhan Taskaya) Date: Thu, 21 May 2020 20:42:02 -0000 Subject: [Python-checkins] bpo-40176: Improve error messages for trailing comma on from import (GH-20294) Message-ID: https://github.com/python/cpython/commit/72e0aa2fd2b9c6da2caa5a9ef54f6495fc2890b0 commit: 72e0aa2fd2b9c6da2caa5a9ef54f6495fc2890b0 branch: master author: Batuhan Taskaya committer: GitHub date: 2020-05-21T21:41:58+01:00 summary: bpo-40176: Improve error messages for trailing comma on from import (GH-20294) files: M Grammar/python.gram M Lib/test/test_syntax.py M Parser/pegen/parse.c diff --git a/Grammar/python.gram b/Grammar/python.gram index 40e7818d49602..9bf2697a7e2d3 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -134,8 +134,9 @@ import_from[stmt_ty]: _Py_ImportFrom(NULL, b, _PyPegen_seq_count_dots(a), EXTRA) } import_from_targets[asdl_seq*]: | '(' a=import_from_as_names [','] ')' { a } - | import_from_as_names + | import_from_as_names !',' | '*' { _PyPegen_singleton_seq(p, CHECK(_PyPegen_alias_for_star(p))) } + | invalid_import_from_targets import_from_as_names[asdl_seq*]: | a=','.import_from_as_name+ { a } import_from_as_name[alias_ty]: @@ -670,3 +671,6 @@ invalid_double_type_comments: invalid_del_target: | a=star_expression &del_target_end { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "cannot delete %s", _PyPegen_get_expr_name(a)) } +invalid_import_from_targets: + | import_from_as_names ',' { + RAISE_SYNTAX_ERROR("trailing comma not allowed without surrounding parentheses") } diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py index a82b444b67a27..6d9c4e4b55e1c 100644 --- a/Lib/test/test_syntax.py +++ b/Lib/test/test_syntax.py @@ -641,6 +641,14 @@ Traceback (most recent call last): SyntaxError: cannot assign to f-string expression +>>> from t import x, +Traceback (most recent call last): +SyntaxError: trailing comma not allowed without surrounding parentheses + +>>> 
from t import x,y, +Traceback (most recent call last): +SyntaxError: trailing comma not allowed without surrounding parentheses + Corner-cases that used to fail to raise the correct error: >>> def f(*, x=lambda __debug__:0): pass diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index 286f72111e351..75aefa4d89365 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -221,154 +221,155 @@ static KeywordToken *reserved_keywords[] = { #define invalid_lambda_star_etc_type 1150 #define invalid_double_type_comments_type 1151 #define invalid_del_target_type 1152 -#define _loop0_1_type 1153 -#define _loop0_2_type 1154 -#define _loop0_4_type 1155 -#define _gather_3_type 1156 -#define _loop0_6_type 1157 -#define _gather_5_type 1158 -#define _loop0_8_type 1159 -#define _gather_7_type 1160 -#define _loop0_10_type 1161 -#define _gather_9_type 1162 -#define _loop1_11_type 1163 -#define _loop0_13_type 1164 -#define _gather_12_type 1165 -#define _tmp_14_type 1166 -#define _tmp_15_type 1167 -#define _tmp_16_type 1168 -#define _tmp_17_type 1169 -#define _tmp_18_type 1170 -#define _tmp_19_type 1171 -#define _tmp_20_type 1172 -#define _tmp_21_type 1173 -#define _loop1_22_type 1174 -#define _tmp_23_type 1175 -#define _tmp_24_type 1176 -#define _loop0_26_type 1177 -#define _gather_25_type 1178 -#define _loop0_28_type 1179 -#define _gather_27_type 1180 -#define _tmp_29_type 1181 -#define _loop0_30_type 1182 -#define _loop1_31_type 1183 -#define _loop0_33_type 1184 -#define _gather_32_type 1185 -#define _tmp_34_type 1186 -#define _loop0_36_type 1187 -#define _gather_35_type 1188 -#define _tmp_37_type 1189 -#define _loop0_39_type 1190 -#define _gather_38_type 1191 -#define _loop0_41_type 1192 -#define _gather_40_type 1193 -#define _loop0_43_type 1194 -#define _gather_42_type 1195 -#define _loop0_45_type 1196 -#define _gather_44_type 1197 -#define _tmp_46_type 1198 -#define _loop1_47_type 1199 -#define _tmp_48_type 1200 -#define _tmp_49_type 1201 -#define _tmp_50_type 1202 
-#define _tmp_51_type 1203 -#define _tmp_52_type 1204 -#define _loop0_53_type 1205 -#define _loop0_54_type 1206 -#define _loop0_55_type 1207 -#define _loop1_56_type 1208 -#define _loop0_57_type 1209 -#define _loop1_58_type 1210 -#define _loop1_59_type 1211 -#define _loop1_60_type 1212 -#define _loop0_61_type 1213 -#define _loop1_62_type 1214 -#define _loop0_63_type 1215 -#define _loop1_64_type 1216 -#define _loop0_65_type 1217 -#define _loop1_66_type 1218 -#define _loop1_67_type 1219 -#define _tmp_68_type 1220 -#define _loop0_70_type 1221 -#define _gather_69_type 1222 -#define _loop1_71_type 1223 -#define _loop0_73_type 1224 -#define _gather_72_type 1225 -#define _loop1_74_type 1226 -#define _loop0_75_type 1227 -#define _loop0_76_type 1228 -#define _loop0_77_type 1229 -#define _loop1_78_type 1230 -#define _loop0_79_type 1231 -#define _loop1_80_type 1232 -#define _loop1_81_type 1233 -#define _loop1_82_type 1234 -#define _loop0_83_type 1235 -#define _loop1_84_type 1236 -#define _loop0_85_type 1237 -#define _loop1_86_type 1238 -#define _loop0_87_type 1239 -#define _loop1_88_type 1240 -#define _loop1_89_type 1241 -#define _loop1_90_type 1242 -#define _loop1_91_type 1243 -#define _tmp_92_type 1244 -#define _loop0_94_type 1245 -#define _gather_93_type 1246 -#define _tmp_95_type 1247 -#define _tmp_96_type 1248 -#define _tmp_97_type 1249 -#define _tmp_98_type 1250 -#define _loop1_99_type 1251 -#define _tmp_100_type 1252 -#define _tmp_101_type 1253 -#define _loop0_103_type 1254 -#define _gather_102_type 1255 -#define _loop1_104_type 1256 -#define _loop0_105_type 1257 -#define _loop0_106_type 1258 -#define _tmp_107_type 1259 -#define _tmp_108_type 1260 -#define _loop0_110_type 1261 -#define _gather_109_type 1262 -#define _loop0_112_type 1263 -#define _gather_111_type 1264 -#define _loop0_114_type 1265 -#define _gather_113_type 1266 -#define _loop0_116_type 1267 -#define _gather_115_type 1268 -#define _loop0_117_type 1269 -#define _loop0_119_type 1270 -#define 
_gather_118_type 1271 -#define _tmp_120_type 1272 -#define _loop0_122_type 1273 -#define _gather_121_type 1274 -#define _loop0_124_type 1275 -#define _gather_123_type 1276 -#define _tmp_125_type 1277 -#define _loop0_126_type 1278 -#define _tmp_127_type 1279 -#define _tmp_128_type 1280 -#define _tmp_129_type 1281 -#define _tmp_130_type 1282 -#define _loop0_131_type 1283 -#define _tmp_132_type 1284 -#define _tmp_133_type 1285 -#define _tmp_134_type 1286 -#define _tmp_135_type 1287 -#define _tmp_136_type 1288 -#define _tmp_137_type 1289 -#define _tmp_138_type 1290 -#define _tmp_139_type 1291 -#define _tmp_140_type 1292 -#define _tmp_141_type 1293 -#define _tmp_142_type 1294 -#define _tmp_143_type 1295 -#define _tmp_144_type 1296 -#define _tmp_145_type 1297 -#define _loop1_146_type 1298 -#define _tmp_147_type 1299 -#define _tmp_148_type 1300 +#define invalid_import_from_targets_type 1153 +#define _loop0_1_type 1154 +#define _loop0_2_type 1155 +#define _loop0_4_type 1156 +#define _gather_3_type 1157 +#define _loop0_6_type 1158 +#define _gather_5_type 1159 +#define _loop0_8_type 1160 +#define _gather_7_type 1161 +#define _loop0_10_type 1162 +#define _gather_9_type 1163 +#define _loop1_11_type 1164 +#define _loop0_13_type 1165 +#define _gather_12_type 1166 +#define _tmp_14_type 1167 +#define _tmp_15_type 1168 +#define _tmp_16_type 1169 +#define _tmp_17_type 1170 +#define _tmp_18_type 1171 +#define _tmp_19_type 1172 +#define _tmp_20_type 1173 +#define _tmp_21_type 1174 +#define _loop1_22_type 1175 +#define _tmp_23_type 1176 +#define _tmp_24_type 1177 +#define _loop0_26_type 1178 +#define _gather_25_type 1179 +#define _loop0_28_type 1180 +#define _gather_27_type 1181 +#define _tmp_29_type 1182 +#define _loop0_30_type 1183 +#define _loop1_31_type 1184 +#define _loop0_33_type 1185 +#define _gather_32_type 1186 +#define _tmp_34_type 1187 +#define _loop0_36_type 1188 +#define _gather_35_type 1189 +#define _tmp_37_type 1190 +#define _loop0_39_type 1191 +#define _gather_38_type 
1192 +#define _loop0_41_type 1193 +#define _gather_40_type 1194 +#define _loop0_43_type 1195 +#define _gather_42_type 1196 +#define _loop0_45_type 1197 +#define _gather_44_type 1198 +#define _tmp_46_type 1199 +#define _loop1_47_type 1200 +#define _tmp_48_type 1201 +#define _tmp_49_type 1202 +#define _tmp_50_type 1203 +#define _tmp_51_type 1204 +#define _tmp_52_type 1205 +#define _loop0_53_type 1206 +#define _loop0_54_type 1207 +#define _loop0_55_type 1208 +#define _loop1_56_type 1209 +#define _loop0_57_type 1210 +#define _loop1_58_type 1211 +#define _loop1_59_type 1212 +#define _loop1_60_type 1213 +#define _loop0_61_type 1214 +#define _loop1_62_type 1215 +#define _loop0_63_type 1216 +#define _loop1_64_type 1217 +#define _loop0_65_type 1218 +#define _loop1_66_type 1219 +#define _loop1_67_type 1220 +#define _tmp_68_type 1221 +#define _loop0_70_type 1222 +#define _gather_69_type 1223 +#define _loop1_71_type 1224 +#define _loop0_73_type 1225 +#define _gather_72_type 1226 +#define _loop1_74_type 1227 +#define _loop0_75_type 1228 +#define _loop0_76_type 1229 +#define _loop0_77_type 1230 +#define _loop1_78_type 1231 +#define _loop0_79_type 1232 +#define _loop1_80_type 1233 +#define _loop1_81_type 1234 +#define _loop1_82_type 1235 +#define _loop0_83_type 1236 +#define _loop1_84_type 1237 +#define _loop0_85_type 1238 +#define _loop1_86_type 1239 +#define _loop0_87_type 1240 +#define _loop1_88_type 1241 +#define _loop1_89_type 1242 +#define _loop1_90_type 1243 +#define _loop1_91_type 1244 +#define _tmp_92_type 1245 +#define _loop0_94_type 1246 +#define _gather_93_type 1247 +#define _tmp_95_type 1248 +#define _tmp_96_type 1249 +#define _tmp_97_type 1250 +#define _tmp_98_type 1251 +#define _loop1_99_type 1252 +#define _tmp_100_type 1253 +#define _tmp_101_type 1254 +#define _loop0_103_type 1255 +#define _gather_102_type 1256 +#define _loop1_104_type 1257 +#define _loop0_105_type 1258 +#define _loop0_106_type 1259 +#define _tmp_107_type 1260 +#define _tmp_108_type 1261 +#define 
_loop0_110_type 1262 +#define _gather_109_type 1263 +#define _loop0_112_type 1264 +#define _gather_111_type 1265 +#define _loop0_114_type 1266 +#define _gather_113_type 1267 +#define _loop0_116_type 1268 +#define _gather_115_type 1269 +#define _loop0_117_type 1270 +#define _loop0_119_type 1271 +#define _gather_118_type 1272 +#define _tmp_120_type 1273 +#define _loop0_122_type 1274 +#define _gather_121_type 1275 +#define _loop0_124_type 1276 +#define _gather_123_type 1277 +#define _tmp_125_type 1278 +#define _loop0_126_type 1279 +#define _tmp_127_type 1280 +#define _tmp_128_type 1281 +#define _tmp_129_type 1282 +#define _tmp_130_type 1283 +#define _loop0_131_type 1284 +#define _tmp_132_type 1285 +#define _tmp_133_type 1286 +#define _tmp_134_type 1287 +#define _tmp_135_type 1288 +#define _tmp_136_type 1289 +#define _tmp_137_type 1290 +#define _tmp_138_type 1291 +#define _tmp_139_type 1292 +#define _tmp_140_type 1293 +#define _tmp_141_type 1294 +#define _tmp_142_type 1295 +#define _tmp_143_type 1296 +#define _tmp_144_type 1297 +#define _tmp_145_type 1298 +#define _loop1_146_type 1299 +#define _tmp_147_type 1300 +#define _tmp_148_type 1301 static mod_ty file_rule(Parser *p); static mod_ty interactive_rule(Parser *p); @@ -523,6 +524,7 @@ static void *invalid_star_etc_rule(Parser *p); static void *invalid_lambda_star_etc_rule(Parser *p); static void *invalid_double_type_comments_rule(Parser *p); static void *invalid_del_target_rule(Parser *p); +static void *invalid_import_from_targets_rule(Parser *p); static asdl_seq *_loop0_1_rule(Parser *p); static asdl_seq *_loop0_2_rule(Parser *p); static asdl_seq *_loop0_4_rule(Parser *p); @@ -2592,7 +2594,11 @@ import_from_rule(Parser *p) return _res; } -// import_from_targets: '(' import_from_as_names ','? ')' | import_from_as_names | '*' +// import_from_targets: +// | '(' import_from_as_names ','? 
')' +// | import_from_as_names !',' +// | '*' +// | invalid_import_from_targets static asdl_seq* import_from_targets_rule(Parser *p) { @@ -2629,13 +2635,15 @@ import_from_targets_rule(Parser *p) } p->mark = _mark; } - { // import_from_as_names + { // import_from_as_names !',' if (p->error_indicator) { return NULL; } asdl_seq* import_from_as_names_var; if ( (import_from_as_names_var = import_from_as_names_rule(p)) // import_from_as_names + && + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 12) // token=',' ) { _res = import_from_as_names_var; @@ -2661,6 +2669,20 @@ import_from_targets_rule(Parser *p) } p->mark = _mark; } + { // invalid_import_from_targets + if (p->error_indicator) { + return NULL; + } + void *invalid_import_from_targets_var; + if ( + (invalid_import_from_targets_var = invalid_import_from_targets_rule(p)) // invalid_import_from_targets + ) + { + _res = invalid_import_from_targets_var; + goto done; + } + p->mark = _mark; + } _res = NULL; done: return _res; @@ -12213,6 +12235,41 @@ invalid_del_target_rule(Parser *p) return _res; } +// invalid_import_from_targets: import_from_as_names ',' +static void * +invalid_import_from_targets_rule(Parser *p) +{ + if (p->error_indicator) { + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // import_from_as_names ',' + if (p->error_indicator) { + return NULL; + } + Token * _literal; + asdl_seq* import_from_as_names_var; + if ( + (import_from_as_names_var = import_from_as_names_rule(p)) // import_from_as_names + && + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + ) + { + _res = RAISE_SYNTAX_ERROR ( "trailing comma not allowed without surrounding parentheses" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + return NULL; + } + goto done; + } + p->mark = _mark; + } + _res = NULL; + done: + return _res; +} + // _loop0_1: NEWLINE static asdl_seq * _loop0_1_rule(Parser *p) From webhook-mailer at python.org Thu May 21 18:40:05 2020 From: webhook-mailer at 
python.org (Batuhan Taskaya) Date: Thu, 21 May 2020 22:40:05 -0000 Subject: [Python-checkins] bpo-40715: Reject dict unpacking on dict comprehensions (GH-20292) Message-ID: https://github.com/python/cpython/commit/b8a65ec1d3d4660d0ee38a9765d98f5cdcabdef5 commit: b8a65ec1d3d4660d0ee38a9765d98f5cdcabdef5 branch: master author: Batuhan Taskaya committer: GitHub date: 2020-05-21T23:39:56+01:00 summary: bpo-40715: Reject dict unpacking on dict comprehensions (GH-20292) Co-authored-by: Lysandros Nikolaou Co-authored-by: Pablo Galindo files: M Grammar/python.gram M Lib/test/test_unpack_ex.py M Parser/pegen/parse.c diff --git a/Grammar/python.gram b/Grammar/python.gram index 9bf2697a7e2d3..a771abf46fd25 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -499,14 +499,16 @@ setcomp[expr_ty]: | '{' a=expression b=for_if_clauses '}' { _Py_SetComp(a, b, EXTRA) } | invalid_comprehension dict[expr_ty]: - | '{' a=[kvpairs] '}' { _Py_Dict(CHECK(_PyPegen_get_keys(p, a)), - CHECK(_PyPegen_get_values(p, a)), EXTRA) } + | '{' a=[double_starred_kvpairs] '}' { + _Py_Dict(CHECK(_PyPegen_get_keys(p, a)), CHECK(_PyPegen_get_values(p, a)), EXTRA) } dictcomp[expr_ty]: | '{' a=kvpair b=for_if_clauses '}' { _Py_DictComp(a->key, a->value, b, EXTRA) } -kvpairs[asdl_seq*]: a=','.kvpair+ [','] { a } -kvpair[KeyValuePair*]: + | invalid_dict_comprehension +double_starred_kvpairs[asdl_seq*]: a=','.double_starred_kvpair+ [','] { a } +double_starred_kvpair[KeyValuePair*]: | '**' a=bitwise_or { _PyPegen_key_value_pair(p, NULL, a) } - | a=expression ':' b=expression { _PyPegen_key_value_pair(p, a, b) } + | kvpair +kvpair[KeyValuePair*]: a=expression ':' b=expression { _PyPegen_key_value_pair(p, a, b) } for_if_clauses[asdl_seq*]: | for_if_clause+ for_if_clause[comprehension_ty]: @@ -657,6 +659,9 @@ invalid_block: invalid_comprehension: | ('[' | '(' | '{') a=starred_expression for_if_clauses { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "iterable unpacking cannot be used in comprehension") } 
+invalid_dict_comprehension: + | '{' a='**' bitwise_or for_if_clauses '}' { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "dict unpacking cannot be used in dict comprehension") } invalid_parameters: | param_no_default* (slash_with_default | param_with_default+) param_no_default { RAISE_SYNTAX_ERROR("non-default argument follows default argument") } diff --git a/Lib/test/test_unpack_ex.py b/Lib/test/test_unpack_ex.py index 2f53457b232a6..fcc93829cc3b8 100644 --- a/Lib/test/test_unpack_ex.py +++ b/Lib/test/test_unpack_ex.py @@ -158,6 +158,11 @@ ... SyntaxError: iterable unpacking cannot be used in comprehension + >>> {**{} for a in [1]} + Traceback (most recent call last): + ... + SyntaxError: dict unpacking cannot be used in dict comprehension + # Pegen is better here. # Generator expression in function arguments diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index 75aefa4d89365..b4756319783e2 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -184,192 +184,194 @@ static KeywordToken *reserved_keywords[] = { #define setcomp_type 1113 #define dict_type 1114 #define dictcomp_type 1115 -#define kvpairs_type 1116 -#define kvpair_type 1117 -#define for_if_clauses_type 1118 -#define for_if_clause_type 1119 -#define yield_expr_type 1120 -#define arguments_type 1121 -#define args_type 1122 -#define kwargs_type 1123 -#define starred_expression_type 1124 -#define kwarg_or_starred_type 1125 -#define kwarg_or_double_starred_type 1126 -#define star_targets_type 1127 -#define star_targets_seq_type 1128 -#define star_target_type 1129 -#define star_atom_type 1130 -#define single_target_type 1131 -#define single_subscript_attribute_target_type 1132 -#define del_targets_type 1133 -#define del_target_type 1134 -#define del_t_atom_type 1135 -#define del_target_end_type 1136 -#define targets_type 1137 -#define target_type 1138 -#define t_primary_type 1139 // Left-recursive -#define t_lookahead_type 1140 -#define t_atom_type 1141 -#define incorrect_arguments_type 1142 
-#define invalid_kwarg_type 1143 -#define invalid_named_expression_type 1144 -#define invalid_assignment_type 1145 -#define invalid_block_type 1146 -#define invalid_comprehension_type 1147 -#define invalid_parameters_type 1148 -#define invalid_star_etc_type 1149 -#define invalid_lambda_star_etc_type 1150 -#define invalid_double_type_comments_type 1151 -#define invalid_del_target_type 1152 -#define invalid_import_from_targets_type 1153 -#define _loop0_1_type 1154 -#define _loop0_2_type 1155 -#define _loop0_4_type 1156 -#define _gather_3_type 1157 -#define _loop0_6_type 1158 -#define _gather_5_type 1159 -#define _loop0_8_type 1160 -#define _gather_7_type 1161 -#define _loop0_10_type 1162 -#define _gather_9_type 1163 -#define _loop1_11_type 1164 -#define _loop0_13_type 1165 -#define _gather_12_type 1166 -#define _tmp_14_type 1167 -#define _tmp_15_type 1168 -#define _tmp_16_type 1169 -#define _tmp_17_type 1170 -#define _tmp_18_type 1171 -#define _tmp_19_type 1172 -#define _tmp_20_type 1173 -#define _tmp_21_type 1174 -#define _loop1_22_type 1175 -#define _tmp_23_type 1176 -#define _tmp_24_type 1177 -#define _loop0_26_type 1178 -#define _gather_25_type 1179 -#define _loop0_28_type 1180 -#define _gather_27_type 1181 -#define _tmp_29_type 1182 -#define _loop0_30_type 1183 -#define _loop1_31_type 1184 -#define _loop0_33_type 1185 -#define _gather_32_type 1186 -#define _tmp_34_type 1187 -#define _loop0_36_type 1188 -#define _gather_35_type 1189 -#define _tmp_37_type 1190 -#define _loop0_39_type 1191 -#define _gather_38_type 1192 -#define _loop0_41_type 1193 -#define _gather_40_type 1194 -#define _loop0_43_type 1195 -#define _gather_42_type 1196 -#define _loop0_45_type 1197 -#define _gather_44_type 1198 -#define _tmp_46_type 1199 -#define _loop1_47_type 1200 -#define _tmp_48_type 1201 -#define _tmp_49_type 1202 -#define _tmp_50_type 1203 -#define _tmp_51_type 1204 -#define _tmp_52_type 1205 -#define _loop0_53_type 1206 -#define _loop0_54_type 1207 -#define _loop0_55_type 1208 
-#define _loop1_56_type 1209 -#define _loop0_57_type 1210 -#define _loop1_58_type 1211 -#define _loop1_59_type 1212 -#define _loop1_60_type 1213 -#define _loop0_61_type 1214 -#define _loop1_62_type 1215 -#define _loop0_63_type 1216 -#define _loop1_64_type 1217 -#define _loop0_65_type 1218 -#define _loop1_66_type 1219 -#define _loop1_67_type 1220 -#define _tmp_68_type 1221 -#define _loop0_70_type 1222 -#define _gather_69_type 1223 -#define _loop1_71_type 1224 -#define _loop0_73_type 1225 -#define _gather_72_type 1226 -#define _loop1_74_type 1227 -#define _loop0_75_type 1228 -#define _loop0_76_type 1229 -#define _loop0_77_type 1230 -#define _loop1_78_type 1231 -#define _loop0_79_type 1232 -#define _loop1_80_type 1233 -#define _loop1_81_type 1234 -#define _loop1_82_type 1235 -#define _loop0_83_type 1236 -#define _loop1_84_type 1237 -#define _loop0_85_type 1238 -#define _loop1_86_type 1239 -#define _loop0_87_type 1240 -#define _loop1_88_type 1241 -#define _loop1_89_type 1242 -#define _loop1_90_type 1243 -#define _loop1_91_type 1244 -#define _tmp_92_type 1245 -#define _loop0_94_type 1246 -#define _gather_93_type 1247 -#define _tmp_95_type 1248 -#define _tmp_96_type 1249 -#define _tmp_97_type 1250 -#define _tmp_98_type 1251 -#define _loop1_99_type 1252 -#define _tmp_100_type 1253 -#define _tmp_101_type 1254 -#define _loop0_103_type 1255 -#define _gather_102_type 1256 -#define _loop1_104_type 1257 -#define _loop0_105_type 1258 -#define _loop0_106_type 1259 -#define _tmp_107_type 1260 -#define _tmp_108_type 1261 -#define _loop0_110_type 1262 -#define _gather_109_type 1263 -#define _loop0_112_type 1264 -#define _gather_111_type 1265 -#define _loop0_114_type 1266 -#define _gather_113_type 1267 -#define _loop0_116_type 1268 -#define _gather_115_type 1269 -#define _loop0_117_type 1270 -#define _loop0_119_type 1271 -#define _gather_118_type 1272 -#define _tmp_120_type 1273 -#define _loop0_122_type 1274 -#define _gather_121_type 1275 -#define _loop0_124_type 1276 -#define 
_gather_123_type 1277 -#define _tmp_125_type 1278 -#define _loop0_126_type 1279 -#define _tmp_127_type 1280 -#define _tmp_128_type 1281 -#define _tmp_129_type 1282 -#define _tmp_130_type 1283 -#define _loop0_131_type 1284 -#define _tmp_132_type 1285 -#define _tmp_133_type 1286 -#define _tmp_134_type 1287 -#define _tmp_135_type 1288 -#define _tmp_136_type 1289 -#define _tmp_137_type 1290 -#define _tmp_138_type 1291 -#define _tmp_139_type 1292 -#define _tmp_140_type 1293 -#define _tmp_141_type 1294 -#define _tmp_142_type 1295 -#define _tmp_143_type 1296 -#define _tmp_144_type 1297 -#define _tmp_145_type 1298 -#define _loop1_146_type 1299 -#define _tmp_147_type 1300 -#define _tmp_148_type 1301 +#define double_starred_kvpairs_type 1116 +#define double_starred_kvpair_type 1117 +#define kvpair_type 1118 +#define for_if_clauses_type 1119 +#define for_if_clause_type 1120 +#define yield_expr_type 1121 +#define arguments_type 1122 +#define args_type 1123 +#define kwargs_type 1124 +#define starred_expression_type 1125 +#define kwarg_or_starred_type 1126 +#define kwarg_or_double_starred_type 1127 +#define star_targets_type 1128 +#define star_targets_seq_type 1129 +#define star_target_type 1130 +#define star_atom_type 1131 +#define single_target_type 1132 +#define single_subscript_attribute_target_type 1133 +#define del_targets_type 1134 +#define del_target_type 1135 +#define del_t_atom_type 1136 +#define del_target_end_type 1137 +#define targets_type 1138 +#define target_type 1139 +#define t_primary_type 1140 // Left-recursive +#define t_lookahead_type 1141 +#define t_atom_type 1142 +#define incorrect_arguments_type 1143 +#define invalid_kwarg_type 1144 +#define invalid_named_expression_type 1145 +#define invalid_assignment_type 1146 +#define invalid_block_type 1147 +#define invalid_comprehension_type 1148 +#define invalid_dict_comprehension_type 1149 +#define invalid_parameters_type 1150 +#define invalid_star_etc_type 1151 +#define invalid_lambda_star_etc_type 1152 +#define 
invalid_double_type_comments_type 1153 +#define invalid_del_target_type 1154 +#define invalid_import_from_targets_type 1155 +#define _loop0_1_type 1156 +#define _loop0_2_type 1157 +#define _loop0_4_type 1158 +#define _gather_3_type 1159 +#define _loop0_6_type 1160 +#define _gather_5_type 1161 +#define _loop0_8_type 1162 +#define _gather_7_type 1163 +#define _loop0_10_type 1164 +#define _gather_9_type 1165 +#define _loop1_11_type 1166 +#define _loop0_13_type 1167 +#define _gather_12_type 1168 +#define _tmp_14_type 1169 +#define _tmp_15_type 1170 +#define _tmp_16_type 1171 +#define _tmp_17_type 1172 +#define _tmp_18_type 1173 +#define _tmp_19_type 1174 +#define _tmp_20_type 1175 +#define _tmp_21_type 1176 +#define _loop1_22_type 1177 +#define _tmp_23_type 1178 +#define _tmp_24_type 1179 +#define _loop0_26_type 1180 +#define _gather_25_type 1181 +#define _loop0_28_type 1182 +#define _gather_27_type 1183 +#define _tmp_29_type 1184 +#define _loop0_30_type 1185 +#define _loop1_31_type 1186 +#define _loop0_33_type 1187 +#define _gather_32_type 1188 +#define _tmp_34_type 1189 +#define _loop0_36_type 1190 +#define _gather_35_type 1191 +#define _tmp_37_type 1192 +#define _loop0_39_type 1193 +#define _gather_38_type 1194 +#define _loop0_41_type 1195 +#define _gather_40_type 1196 +#define _loop0_43_type 1197 +#define _gather_42_type 1198 +#define _loop0_45_type 1199 +#define _gather_44_type 1200 +#define _tmp_46_type 1201 +#define _loop1_47_type 1202 +#define _tmp_48_type 1203 +#define _tmp_49_type 1204 +#define _tmp_50_type 1205 +#define _tmp_51_type 1206 +#define _tmp_52_type 1207 +#define _loop0_53_type 1208 +#define _loop0_54_type 1209 +#define _loop0_55_type 1210 +#define _loop1_56_type 1211 +#define _loop0_57_type 1212 +#define _loop1_58_type 1213 +#define _loop1_59_type 1214 +#define _loop1_60_type 1215 +#define _loop0_61_type 1216 +#define _loop1_62_type 1217 +#define _loop0_63_type 1218 +#define _loop1_64_type 1219 +#define _loop0_65_type 1220 +#define _loop1_66_type 
1221 +#define _loop1_67_type 1222 +#define _tmp_68_type 1223 +#define _loop0_70_type 1224 +#define _gather_69_type 1225 +#define _loop1_71_type 1226 +#define _loop0_73_type 1227 +#define _gather_72_type 1228 +#define _loop1_74_type 1229 +#define _loop0_75_type 1230 +#define _loop0_76_type 1231 +#define _loop0_77_type 1232 +#define _loop1_78_type 1233 +#define _loop0_79_type 1234 +#define _loop1_80_type 1235 +#define _loop1_81_type 1236 +#define _loop1_82_type 1237 +#define _loop0_83_type 1238 +#define _loop1_84_type 1239 +#define _loop0_85_type 1240 +#define _loop1_86_type 1241 +#define _loop0_87_type 1242 +#define _loop1_88_type 1243 +#define _loop1_89_type 1244 +#define _loop1_90_type 1245 +#define _loop1_91_type 1246 +#define _tmp_92_type 1247 +#define _loop0_94_type 1248 +#define _gather_93_type 1249 +#define _tmp_95_type 1250 +#define _tmp_96_type 1251 +#define _tmp_97_type 1252 +#define _tmp_98_type 1253 +#define _loop1_99_type 1254 +#define _tmp_100_type 1255 +#define _tmp_101_type 1256 +#define _loop0_103_type 1257 +#define _gather_102_type 1258 +#define _loop1_104_type 1259 +#define _loop0_105_type 1260 +#define _loop0_106_type 1261 +#define _tmp_107_type 1262 +#define _tmp_108_type 1263 +#define _loop0_110_type 1264 +#define _gather_109_type 1265 +#define _loop0_112_type 1266 +#define _gather_111_type 1267 +#define _loop0_114_type 1268 +#define _gather_113_type 1269 +#define _loop0_116_type 1270 +#define _gather_115_type 1271 +#define _loop0_117_type 1272 +#define _loop0_119_type 1273 +#define _gather_118_type 1274 +#define _tmp_120_type 1275 +#define _loop0_122_type 1276 +#define _gather_121_type 1277 +#define _loop0_124_type 1278 +#define _gather_123_type 1279 +#define _tmp_125_type 1280 +#define _loop0_126_type 1281 +#define _tmp_127_type 1282 +#define _tmp_128_type 1283 +#define _tmp_129_type 1284 +#define _tmp_130_type 1285 +#define _loop0_131_type 1286 +#define _tmp_132_type 1287 +#define _tmp_133_type 1288 +#define _tmp_134_type 1289 +#define 
_tmp_135_type 1290 +#define _tmp_136_type 1291 +#define _tmp_137_type 1292 +#define _tmp_138_type 1293 +#define _tmp_139_type 1294 +#define _tmp_140_type 1295 +#define _tmp_141_type 1296 +#define _tmp_142_type 1297 +#define _tmp_143_type 1298 +#define _tmp_144_type 1299 +#define _tmp_145_type 1300 +#define _loop1_146_type 1301 +#define _tmp_147_type 1302 +#define _tmp_148_type 1303 static mod_ty file_rule(Parser *p); static mod_ty interactive_rule(Parser *p); @@ -487,7 +489,8 @@ static expr_ty set_rule(Parser *p); static expr_ty setcomp_rule(Parser *p); static expr_ty dict_rule(Parser *p); static expr_ty dictcomp_rule(Parser *p); -static asdl_seq* kvpairs_rule(Parser *p); +static asdl_seq* double_starred_kvpairs_rule(Parser *p); +static KeyValuePair* double_starred_kvpair_rule(Parser *p); static KeyValuePair* kvpair_rule(Parser *p); static asdl_seq* for_if_clauses_rule(Parser *p); static comprehension_ty for_if_clause_rule(Parser *p); @@ -519,6 +522,7 @@ static void *invalid_named_expression_rule(Parser *p); static void *invalid_assignment_rule(Parser *p); static void *invalid_block_rule(Parser *p); static void *invalid_comprehension_rule(Parser *p); +static void *invalid_dict_comprehension_rule(Parser *p); static void *invalid_parameters_rule(Parser *p); static void *invalid_star_etc_rule(Parser *p); static void *invalid_lambda_star_etc_rule(Parser *p); @@ -9284,7 +9288,7 @@ setcomp_rule(Parser *p) return _res; } -// dict: '{' kvpairs? '}' +// dict: '{' double_starred_kvpairs? '}' static expr_ty dict_rule(Parser *p) { @@ -9301,7 +9305,7 @@ dict_rule(Parser *p) UNUSED(_start_lineno); // Only used by EXTRA macro int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro - { // '{' kvpairs? '}' + { // '{' double_starred_kvpairs? '}' if (p->error_indicator) { return NULL; } @@ -9311,7 +9315,7 @@ dict_rule(Parser *p) if ( (_literal = _PyPegen_expect_token(p, 25)) // token='{' && - (a = kvpairs_rule(p), 1) // kvpairs? 
+ (a = double_starred_kvpairs_rule(p), 1) // double_starred_kvpairs? && (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}' ) @@ -9338,7 +9342,7 @@ dict_rule(Parser *p) return _res; } -// dictcomp: '{' kvpair for_if_clauses '}' +// dictcomp: '{' kvpair for_if_clauses '}' | invalid_dict_comprehension static expr_ty dictcomp_rule(Parser *p) { @@ -9390,21 +9394,35 @@ dictcomp_rule(Parser *p) } p->mark = _mark; } + { // invalid_dict_comprehension + if (p->error_indicator) { + return NULL; + } + void *invalid_dict_comprehension_var; + if ( + (invalid_dict_comprehension_var = invalid_dict_comprehension_rule(p)) // invalid_dict_comprehension + ) + { + _res = invalid_dict_comprehension_var; + goto done; + } + p->mark = _mark; + } _res = NULL; done: return _res; } -// kvpairs: ','.kvpair+ ','? +// double_starred_kvpairs: ','.double_starred_kvpair+ ','? static asdl_seq* -kvpairs_rule(Parser *p) +double_starred_kvpairs_rule(Parser *p) { if (p->error_indicator) { return NULL; } asdl_seq* _res = NULL; int _mark = p->mark; - { // ','.kvpair+ ','? + { // ','.double_starred_kvpair+ ','? if (p->error_indicator) { return NULL; } @@ -9412,7 +9430,7 @@ kvpairs_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; if ( - (a = _gather_102_rule(p)) // ','.kvpair+ + (a = _gather_102_rule(p)) // ','.double_starred_kvpair+ && (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) @@ -9431,9 +9449,9 @@ kvpairs_rule(Parser *p) return _res; } -// kvpair: '**' bitwise_or | expression ':' expression +// double_starred_kvpair: '**' bitwise_or | kvpair static KeyValuePair* -kvpair_rule(Parser *p) +double_starred_kvpair_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -9461,6 +9479,34 @@ kvpair_rule(Parser *p) } p->mark = _mark; } + { // kvpair + if (p->error_indicator) { + return NULL; + } + KeyValuePair* kvpair_var; + if ( + (kvpair_var = kvpair_rule(p)) // kvpair + ) + { + _res = kvpair_var; + goto done; + } + p->mark = _mark; + } + _res = NULL; + done: + return _res; +} + +// kvpair: expression ':' expression +static KeyValuePair* +kvpair_rule(Parser *p) +{ + if (p->error_indicator) { + return NULL; + } + KeyValuePair* _res = NULL; + int _mark = p->mark; { // expression ':' expression if (p->error_indicator) { return NULL; @@ -12024,6 +12070,50 @@ invalid_comprehension_rule(Parser *p) return _res; } +// invalid_dict_comprehension: '{' '**' bitwise_or for_if_clauses '}' +static void * +invalid_dict_comprehension_rule(Parser *p) +{ + if (p->error_indicator) { + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // '{' '**' bitwise_or for_if_clauses '}' + if (p->error_indicator) { + return NULL; + } + Token * _literal; + Token * _literal_1; + Token * a; + expr_ty bitwise_or_var; + asdl_seq* for_if_clauses_var; + if ( + (_literal = _PyPegen_expect_token(p, 25)) // token='{' + && + (a = _PyPegen_expect_token(p, 35)) // token='**' + && + (bitwise_or_var = bitwise_or_rule(p)) // bitwise_or + && + (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses + && + (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}' + ) + { + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "dict unpacking cannot be used in dict comprehension" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + return NULL; + } + goto done; + } + p->mark = _mark; + } + _res = NULL; + done: + return _res; +} + // invalid_parameters: 
// | param_no_default* (slash_with_default | param_with_default+) param_no_default static void * @@ -17336,7 +17426,7 @@ _tmp_101_rule(Parser *p) return _res; } -// _loop0_103: ',' kvpair +// _loop0_103: ',' double_starred_kvpair static asdl_seq * _loop0_103_rule(Parser *p) { @@ -17354,7 +17444,7 @@ _loop0_103_rule(Parser *p) } ssize_t _children_capacity = 1; ssize_t _n = 0; - { // ',' kvpair + { // ',' double_starred_kvpair if (p->error_indicator) { return NULL; } @@ -17363,7 +17453,7 @@ _loop0_103_rule(Parser *p) while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = kvpair_rule(p)) // kvpair + (elem = double_starred_kvpair_rule(p)) // double_starred_kvpair ) { _res = elem; @@ -17400,7 +17490,7 @@ _loop0_103_rule(Parser *p) return _seq; } -// _gather_102: kvpair _loop0_103 +// _gather_102: double_starred_kvpair _loop0_103 static asdl_seq * _gather_102_rule(Parser *p) { @@ -17409,14 +17499,14 @@ _gather_102_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // kvpair _loop0_103 + { // double_starred_kvpair _loop0_103 if (p->error_indicator) { return NULL; } KeyValuePair* elem; asdl_seq * seq; if ( - (elem = kvpair_rule(p)) // kvpair + (elem = double_starred_kvpair_rule(p)) // double_starred_kvpair && (seq = _loop0_103_rule(p)) // _loop0_103 ) From webhook-mailer at python.org Thu May 21 18:45:29 2020 From: webhook-mailer at python.org (Andre Delfino) Date: Thu, 21 May 2020 22:45:29 -0000 Subject: [Python-checkins] [doc] Remove references to obsolete BuildApplet on macOS. (GH-20023) Message-ID: https://github.com/python/cpython/commit/7864f11cdf12807555d62c7a132c191eb41ecc02 commit: 7864f11cdf12807555d62c7a132c191eb41ecc02 branch: master author: Andre Delfino committer: GitHub date: 2020-05-21T18:45:23-04:00 summary: [doc] Remove references to obsolete BuildApplet on macOS. 
(GH-20023) files: M Doc/using/mac.rst diff --git a/Doc/using/mac.rst b/Doc/using/mac.rst index baf737ddaa917..ead71e1b079b3 100644 --- a/Doc/using/mac.rst +++ b/Doc/using/mac.rst @@ -27,9 +27,8 @@ What you get after installing is a number of things: * A :file:`Python 3.9` folder in your :file:`Applications` folder. In here you find IDLE, the development environment that is a standard part of official - Python distributions; PythonLauncher, which handles double-clicking Python - scripts from the Finder; and the "Build Applet" tool, which allows you to - package Python scripts as standalone applications on your system. + Python distributions; and PythonLauncher, which handles double-clicking Python + scripts from the Finder. * A framework :file:`/Library/Frameworks/Python.framework`, which includes the Python executable and libraries. The installer adds this location to your shell @@ -159,11 +158,6 @@ https://riverbankcomputing.com/software/pyqt/intro. Distributing Python Applications on the Mac =========================================== -The "Build Applet" tool that is placed in the MacPython 3.6 folder is fine for -packaging small Python scripts on your own machine to run as a standard Mac -application. This tool, however, is not robust enough to distribute Python -applications to other users. - The standard tool for deploying standalone Python applications on the Mac is :program:`py2app`. More information on installing and using py2app can be found at http://undefined.org/python/#py2app. From webhook-mailer at python.org Thu May 21 19:34:54 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Thu, 21 May 2020 23:34:54 -0000 Subject: [Python-checkins] [doc] Remove references to obsolete BuildApplet on macOS. 
(GH-20023) Message-ID: https://github.com/python/cpython/commit/822efa5695b5ba6c2316c1400e4e9ec2546f7ea5 commit: 822efa5695b5ba6c2316c1400e4e9ec2546f7ea5 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-21T16:34:46-07:00 summary: [doc] Remove references to obsolete BuildApplet on macOS. (GH-20023) (cherry picked from commit 7864f11cdf12807555d62c7a132c191eb41ecc02) Co-authored-by: Andre Delfino files: M Doc/using/mac.rst diff --git a/Doc/using/mac.rst b/Doc/using/mac.rst index b411fa282049a..0253625f369d8 100644 --- a/Doc/using/mac.rst +++ b/Doc/using/mac.rst @@ -27,9 +27,8 @@ What you get after installing is a number of things: * A :file:`Python 3.8` folder in your :file:`Applications` folder. In here you find IDLE, the development environment that is a standard part of official - Python distributions; PythonLauncher, which handles double-clicking Python - scripts from the Finder; and the "Build Applet" tool, which allows you to - package Python scripts as standalone applications on your system. + Python distributions; and PythonLauncher, which handles double-clicking Python + scripts from the Finder. * A framework :file:`/Library/Frameworks/Python.framework`, which includes the Python executable and libraries. The installer adds this location to your shell @@ -159,11 +158,6 @@ https://riverbankcomputing.com/software/pyqt/intro. Distributing Python Applications on the Mac =========================================== -The "Build Applet" tool that is placed in the MacPython 3.6 folder is fine for -packaging small Python scripts on your own machine to run as a standard Mac -application. This tool, however, is not robust enough to distribute Python -applications to other users. - The standard tool for deploying standalone Python applications on the Mac is :program:`py2app`. More information on installing and using py2app can be found at http://undefined.org/python/#py2app. 
From webhook-mailer at python.org Thu May 21 19:35:08 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Thu, 21 May 2020 23:35:08 -0000 Subject: [Python-checkins] [3.7] [doc] Remove references to obsolete BuildApplet on macOS (GH-20023) (GH-20306) Message-ID: https://github.com/python/cpython/commit/3613bf07bd236ba50bc715dd407db0ee98ea739d commit: 3613bf07bd236ba50bc715dd407db0ee98ea739d branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-21T16:35:04-07:00 summary: [3.7] [doc] Remove references to obsolete BuildApplet on macOS (GH-20023) (GH-20306) (cherry picked from commit 7864f11cdf12807555d62c7a132c191eb41ecc02) Co-authored-by: Andre Delfino Automerge-Triggered-By: @ned-deily files: M Doc/using/mac.rst diff --git a/Doc/using/mac.rst b/Doc/using/mac.rst index e685993b65d5c..ef5817d3734bf 100644 --- a/Doc/using/mac.rst +++ b/Doc/using/mac.rst @@ -27,9 +27,8 @@ What you get after installing is a number of things: * A :file:`Python 3.7` folder in your :file:`Applications` folder. In here you find IDLE, the development environment that is a standard part of official - Python distributions; PythonLauncher, which handles double-clicking Python - scripts from the Finder; and the "Build Applet" tool, which allows you to - package Python scripts as standalone applications on your system. + Python distributions; and PythonLauncher, which handles double-clicking Python + scripts from the Finder. * A framework :file:`/Library/Frameworks/Python.framework`, which includes the Python executable and libraries. The installer adds this location to your shell @@ -159,11 +158,6 @@ https://riverbankcomputing.com/software/pyqt/intro. Distributing Python Applications on the Mac =========================================== -The "Build Applet" tool that is placed in the MacPython 3.6 folder is fine for -packaging small Python scripts on your own machine to run as a standard Mac -application. 
This tool, however, is not robust enough to distribute Python -applications to other users. - The standard tool for deploying standalone Python applications on the Mac is :program:`py2app`. More information on installing and using py2app can be found at http://undefined.org/python/#py2app. From webhook-mailer at python.org Thu May 21 20:57:00 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Fri, 22 May 2020 00:57:00 -0000 Subject: [Python-checkins] bpo-40334: Produce better error messages for non-parenthesized genexps (GH-20153) Message-ID: https://github.com/python/cpython/commit/ae145833025b0156ee2a28219e3370f3b27b2a36 commit: ae145833025b0156ee2a28219e3370f3b27b2a36 branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-05-22T01:56:52+01:00 summary: bpo-40334: Produce better error messages for non-parenthesized genexps (GH-20153) The error message, generated for a non-parenthesized generator expression in function calls, was still the generic `invalid syntax`, when the generator expression wasn't appearing as the first argument in the call. With this patch, even on input like `f(a, b, c for c in d, e)`, the correct error message gets produced. 
files: M Grammar/python.gram M Lib/test/test_syntax.py M Parser/pegen/parse.c M Parser/pegen/pegen.c M Parser/pegen/pegen.h diff --git a/Grammar/python.gram b/Grammar/python.gram index a771abf46fd25..19d9bb36fed5f 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -627,6 +627,9 @@ incorrect_arguments: | args ',' '*' { RAISE_SYNTAX_ERROR("iterable argument unpacking follows keyword argument unpacking") } | a=expression for_if_clauses ',' [args | expression for_if_clauses] { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "Generator expression must be parenthesized") } + | a=args for_if_clauses { _PyPegen_nonparen_genexp_in_call(p, a) } + | args ',' a=expression for_if_clauses { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "Generator expression must be parenthesized") } | a=args ',' args { _PyPegen_arguments_parsing_error(p, a) } invalid_kwarg: | a=expression '=' { diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py index 6d9c4e4b55e1c..4df5535b0053b 100644 --- a/Lib/test/test_syntax.py +++ b/Lib/test/test_syntax.py @@ -216,11 +216,9 @@ >>> f(x for x in L, **{}) Traceback (most recent call last): SyntaxError: Generator expression must be parenthesized - -# >>> f(L, x for x in L) -# Traceback (most recent call last): -# SyntaxError: Generator expression must be parenthesized - +>>> f(L, x for x in L) +Traceback (most recent call last): +SyntaxError: Generator expression must be parenthesized >>> f(x for x in L, y for y in L) Traceback (most recent call last): SyntaxError: Generator expression must be parenthesized diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index b4756319783e2..5dff77abc9fd7 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -11670,6 +11670,8 @@ t_atom_rule(Parser *p) // incorrect_arguments: // | args ',' '*' // | expression for_if_clauses ',' [args | expression for_if_clauses] +// | args for_if_clauses +// | args ',' expression for_if_clauses // | args ',' args static void * incorrect_arguments_rule(Parser *p) @@ 
-11731,6 +11733,54 @@ incorrect_arguments_rule(Parser *p) } p->mark = _mark; } + { // args for_if_clauses + if (p->error_indicator) { + return NULL; + } + expr_ty a; + asdl_seq* for_if_clauses_var; + if ( + (a = args_rule(p)) // args + && + (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses + ) + { + _res = _PyPegen_nonparen_genexp_in_call ( p , a ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + return NULL; + } + goto done; + } + p->mark = _mark; + } + { // args ',' expression for_if_clauses + if (p->error_indicator) { + return NULL; + } + Token * _literal; + expr_ty a; + expr_ty args_var; + asdl_seq* for_if_clauses_var; + if ( + (args_var = args_rule(p)) // args + && + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (a = expression_rule(p)) // expression + && + (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses + ) + { + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "Generator expression must be parenthesized" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + return NULL; + } + goto done; + } + p->mark = _mark; + } { // args ',' args if (p->error_indicator) { return NULL; diff --git a/Parser/pegen/pegen.c b/Parser/pegen/pegen.c index ca4ea824b3f28..f1e3f9efb2f6b 100644 --- a/Parser/pegen/pegen.c +++ b/Parser/pegen/pegen.c @@ -2100,3 +2100,24 @@ void *_PyPegen_arguments_parsing_error(Parser *p, expr_ty e) { return RAISE_SYNTAX_ERROR(msg); } + +void * +_PyPegen_nonparen_genexp_in_call(Parser *p, expr_ty args) +{ + /* The rule that calls this function is 'args for_if_clauses'. + For the input f(L, x for x in y), L and x are in args and + the for is parsed as a for_if_clause. 
We have to check if + len <= 1, so that input like dict((a, b) for a, b in x) + gets successfully parsed and then we pass the last + argument (x in the above example) as the location of the + error */ + Py_ssize_t len = asdl_seq_LEN(args->v.Call.args); + if (len <= 1) { + return NULL; + } + + return RAISE_SYNTAX_ERROR_KNOWN_LOCATION( + (expr_ty) asdl_seq_GET(args->v.Call.args, len - 1), + "Generator expression must be parenthesized" + ); +} diff --git a/Parser/pegen/pegen.h b/Parser/pegen/pegen.h index 146804a896fd1..761e90f06db8e 100644 --- a/Parser/pegen/pegen.h +++ b/Parser/pegen/pegen.h @@ -152,7 +152,7 @@ RAISE_ERROR_KNOWN_LOCATION(Parser *p, PyObject *errtype, int lineno, #define RAISE_SYNTAX_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_SyntaxError, msg, ##__VA_ARGS__) #define RAISE_INDENTATION_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_IndentationError, msg, ##__VA_ARGS__) #define RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, msg, ...) \ - RAISE_ERROR_KNOWN_LOCATION(p, PyExc_SyntaxError, a->lineno, a->col_offset, msg, ##__VA_ARGS__) + RAISE_ERROR_KNOWN_LOCATION(p, PyExc_SyntaxError, (a)->lineno, (a)->col_offset, msg, ##__VA_ARGS__) Py_LOCAL_INLINE(void *) CHECK_CALL(Parser *p, void *result) @@ -262,6 +262,7 @@ mod_ty _PyPegen_make_module(Parser *, asdl_seq *); // Error reporting helpers expr_ty _PyPegen_get_invalid_target(expr_ty e); void *_PyPegen_arguments_parsing_error(Parser *, expr_ty); +void *_PyPegen_nonparen_genexp_in_call(Parser *p, expr_ty args); void *_PyPegen_parse(Parser *); From webhook-mailer at python.org Thu May 21 21:48:17 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Fri, 22 May 2020 01:48:17 -0000 Subject: [Python-checkins] Fix debug output in PEG parser generator (GH-20308) Message-ID: https://github.com/python/cpython/commit/b831129123dbb4bfba49824ae893448b05398f27 commit: b831129123dbb4bfba49824ae893448b05398f27 branch: master author: Pablo Galindo committer: GitHub date: 2020-05-22T02:48:09+01:00 summary: Fix debug output 
in PEG parser generator (GH-20308) files: M Tools/peg_generator/pegen/c_generator.py diff --git a/Tools/peg_generator/pegen/c_generator.py b/Tools/peg_generator/pegen/c_generator.py index 586f28cc46975..362698b0df62e 100644 --- a/Tools/peg_generator/pegen/c_generator.py +++ b/Tools/peg_generator/pegen/c_generator.py @@ -487,7 +487,7 @@ def _handle_default_rule_body(self, node: Rule, rhs: Rhs, result_type: str) -> N rhs, is_loop=False, is_gather=node.is_gather(), rulename=node.name, ) if self.debug: - self.print('fprintf(stderr, "Fail at %d: {node.name}\\n", p->mark);') + self.print(f'fprintf(stderr, "Fail at %d: {node.name}\\n", p->mark);') self.print("_res = NULL;") self.print(" done:") with self.indent(): @@ -613,7 +613,7 @@ def emit_default_action(self, is_gather: bool, node: Alt) -> None: else: if self.debug: self.print( - f'fprintf(stderr, "Hit without action [%d:%d]: %s\\n", mark, p->mark, "{node}");' + f'fprintf(stderr, "Hit without action [%d:%d]: %s\\n", _mark, p->mark, "{node}");' ) self.print( f"_res = _PyPegen_dummy_name(p, {', '.join(self.local_variable_names)});" @@ -621,7 +621,7 @@ def emit_default_action(self, is_gather: bool, node: Alt) -> None: else: if self.debug: self.print( - f'fprintf(stderr, "Hit with default action [%d:%d]: %s\\n", mark, p->mark, "{node}");' + f'fprintf(stderr, "Hit with default action [%d:%d]: %s\\n", _mark, p->mark, "{node}");' ) self.print(f"_res = {self.local_variable_names[0]};") From webhook-mailer at python.org Fri May 22 09:54:50 2020 From: webhook-mailer at python.org (Raymond Hettinger) Date: Fri, 22 May 2020 13:54:50 -0000 Subject: [Python-checkins] Restore missing column of digits (GH-20313) Message-ID: https://github.com/python/cpython/commit/bfaf5275ad9c0e8fa3935e6d651628c50e3c5c2d commit: bfaf5275ad9c0e8fa3935e6d651628c50e3c5c2d branch: master author: Raymond Hettinger committer: GitHub date: 2020-05-22T06:54:42-07:00 summary: Restore missing column of digits (GH-20313) files: M Doc/whatsnew/3.9.rst diff --git 
a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index 72ea0c076e3e3..5fd051255da17 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -628,48 +628,48 @@ Here's a summary of performance improvements from Python 3.4 through Python 3.9: .. code-block:: none - Python version 3.4 3.5 3.6 3.7 3.8 3.9 - -------------- --- --- --- --- --- --- + Python version 3.4 3.5 3.6 3.7 3.8 3.9 + -------------- --- --- --- --- --- --- Variable and attribute read access: - read_local 7.1 7.1 5.4 5.1 3.9 4.0 - read_nonlocal 7.1 8.1 5.8 5.4 4.4 4.8 - read_global 5.5 19.0 14.3 13.6 7.6 7.7 - read_builtin 1.1 21.6 18.5 19.0 7.5 7.7 - read_classvar_from_class 5.6 26.5 20.7 19.5 18.4 18.6 - read_classvar_from_instance 2.8 23.5 18.8 17.1 16.4 20.1 - read_instancevar 2.4 33.1 28.0 26.3 25.4 27.7 - read_instancevar_slots 7.8 31.3 20.8 20.8 20.2 24.5 - read_namedtuple 3.8 57.5 45.0 46.8 18.4 23.2 - read_boundmethod 7.6 37.9 29.6 26.9 27.7 45.9 + read_local 7.1 7.1 5.4 5.1 3.9 4.0 + read_nonlocal 7.1 8.1 5.8 5.4 4.4 4.8 + read_global 15.5 19.0 14.3 13.6 7.6 7.7 + read_builtin 21.1 21.6 18.5 19.0 7.5 7.7 + read_classvar_from_class 25.6 26.5 20.7 19.5 18.4 18.6 + read_classvar_from_instance 22.8 23.5 18.8 17.1 16.4 20.1 + read_instancevar 32.4 33.1 28.0 26.3 25.4 27.7 + read_instancevar_slots 27.8 31.3 20.8 20.8 20.2 24.5 + read_namedtuple 73.8 57.5 45.0 46.8 18.4 23.2 + read_boundmethod 37.6 37.9 29.6 26.9 27.7 45.9 Variable and attribute write access: - write_local 8.7 9.3 5.5 5.3 4.3 4.2 - write_nonlocal 0.5 11.1 5.6 5.5 4.7 4.9 - write_global 9.7 21.2 18.0 18.0 15.8 17.2 - write_classvar 2.9 96.0 104.6 102.1 39.2 43.2 - write_instancevar 4.6 45.8 40.0 38.9 35.5 40.7 - write_instancevar_slots 5.6 36.1 27.3 26.6 25.7 27.7 + write_local 8.7 9.3 5.5 5.3 4.3 4.2 + write_nonlocal 10.5 11.1 5.6 5.5 4.7 4.9 + write_global 19.7 21.2 18.0 18.0 15.8 17.2 + write_classvar 92.9 96.0 104.6 102.1 39.2 43.2 + write_instancevar 44.6 45.8 40.0 38.9 35.5 40.7 + write_instancevar_slots 35.6 36.1 27.3 
26.6 25.7 27.7 Data structure read access: - read_list 4.2 24.5 20.8 20.8 19.0 21.1 - read_deque 4.7 25.5 20.2 20.6 19.8 21.6 - read_dict 4.3 25.7 22.3 23.0 21.0 22.5 - read_strdict 2.6 24.3 19.5 21.2 18.9 21.6 + read_list 24.2 24.5 20.8 20.8 19.0 21.1 + read_deque 24.7 25.5 20.2 20.6 19.8 21.6 + read_dict 24.3 25.7 22.3 23.0 21.0 22.5 + read_strdict 22.6 24.3 19.5 21.2 18.9 21.6 Data structure write access: - write_list 7.1 28.5 22.5 21.6 20.0 21.6 - write_deque 8.7 30.1 22.7 21.8 23.5 23.2 - write_dict 1.4 33.3 29.3 29.2 24.7 27.8 - write_strdict 8.4 29.9 27.5 25.2 23.1 29.8 + write_list 27.1 28.5 22.5 21.6 20.0 21.6 + write_deque 28.7 30.1 22.7 21.8 23.5 23.2 + write_dict 31.4 33.3 29.3 29.2 24.7 27.8 + write_strdict 28.4 29.9 27.5 25.2 23.1 29.8 Stack (or queue) operations: - list_append_pop 13.4 112.7 75.4 74.2 50.8 53.9 - deque_append_pop 3.5 57.0 49.4 49.2 42.5 45.5 - deque_append_popleft 3.7 57.3 49.7 49.7 42.8 45.5 + list_append_pop 93.4 112.7 75.4 74.2 50.8 53.9 + deque_append_pop 43.5 57.0 49.4 49.2 42.5 45.5 + deque_append_popleft 43.7 57.3 49.7 49.7 42.8 45.5 Timing loop: - loop_overhead 0.5 0.6 0.4 0.3 0.3 0.3 + loop_overhead 0.5 0.6 0.4 0.3 0.3 0.3 These results were generated from the variable access benchmark script at: ``Tools/scripts/var_access_benchmark.py``. The benchmark script displays timings From webhook-mailer at python.org Fri May 22 10:18:56 2020 From: webhook-mailer at python.org (Huon Wilson) Date: Fri, 22 May 2020 14:18:56 -0000 Subject: [Python-checkins] bpo-40630: Add tracemalloc.reset_peak (GH-20102) Message-ID: https://github.com/python/cpython/commit/8b62644831443e400215eeb822c921f4f06c8977 commit: 8b62644831443e400215eeb822c921f4f06c8977 branch: master author: Huon Wilson committer: GitHub date: 2020-05-22T16:18:51+02:00 summary: bpo-40630: Add tracemalloc.reset_peak (GH-20102) The reset_peak function sets the peak memory size to the current size, representing a resetting of that metric. 
This allows for recording the peak of specific sections of code, ignoring other code that may have had a higher peak (since the most recent `tracemalloc.start()` or tracemalloc.clear_traces()` call). files: A Misc/NEWS.d/next/Library/2020-05-15-13-40-15.bpo-40630.YXEX_M.rst M Doc/library/tracemalloc.rst M Doc/whatsnew/3.10.rst M Lib/test/test_tracemalloc.py M Misc/ACKS M Modules/_tracemalloc.c M Modules/clinic/_tracemalloc.c.h diff --git a/Doc/library/tracemalloc.rst b/Doc/library/tracemalloc.rst index 3eee9457fb29a..fba1caab455d7 100644 --- a/Doc/library/tracemalloc.rst +++ b/Doc/library/tracemalloc.rst @@ -249,6 +249,47 @@ Example of output of the Python test suite:: See :meth:`Snapshot.statistics` for more options. +Record the current and peak size of all traced memory blocks +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The following code computes two sums like ``0 + 1 + 2 + ...`` inefficiently, by +creating a list of those numbers. This list consumes a lot of memory +temporarily. We can use :func:`get_traced_memory` and :func:`reset_peak` to +observe the small memory usage after the sum is computed as well as the peak +memory usage during the computations:: + + import tracemalloc + + tracemalloc.start() + + # Example code: compute a sum with a large temporary list + large_sum = sum(list(range(100000))) + + first_size, first_peak = tracemalloc.get_traced_memory() + + tracemalloc.reset_peak() + + # Example code: compute a sum with a small temporary list + small_sum = sum(list(range(1000))) + + second_size, second_peak = tracemalloc.get_traced_memory() + + print(f"{first_size=}, {first_peak=}") + print(f"{second_size=}, {second_peak=}") + +Output:: + + first_size=664, first_peak=3592984 + second_size=804, second_peak=29704 + +Using :func:`reset_peak` ensured we could accurately record the peak during the +computation of ``small_sum``, even though it is much smaller than the overall +peak size of memory blocks since the :func:`start` call. 
Without the call to +:func:`reset_peak`, ``second_peak`` would still be the peak from the +computation ``large_sum`` (that is, equal to ``first_peak``). In this case, +both peaks are much higher than the final memory usage, and which suggests we +could optimise (by removing the unnecessary call to :class:`list`, and writing +``sum(range(...))``). API --- @@ -289,6 +330,24 @@ Functions :mod:`tracemalloc` module as a tuple: ``(current: int, peak: int)``. +.. function:: reset_peak() + + Set the peak size of memory blocks traced by the :mod:`tracemalloc` module + to the current size. + + Do nothing if the :mod:`tracemalloc` module is not tracing memory + allocations. + + This function only modifies the recorded peak size, and does not modify or + clear any traces, unlike :func:`clear_traces`. Snapshots taken with + :func:`take_snapshot` before a call to :func:`reset_peak` can be + meaningfully compared to snapshots taken after the call. + + See also :func:`get_traced_memory`. + + .. versionadded:: 3.10 + + .. function:: get_tracemalloc_memory() Get the memory usage in bytes of the :mod:`tracemalloc` module used to store diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst index 547778599ef61..e650f9405a811 100644 --- a/Doc/whatsnew/3.10.rst +++ b/Doc/whatsnew/3.10.rst @@ -86,6 +86,12 @@ New Modules Improved Modules ================ +tracemalloc +----------- + +Added :func:`tracemalloc.reset_peak` to set the peak size of traced memory +blocks to the current size, to measure the peak of specific pieces of code. +(Contributed by Huon Wilson in :issue:`40630`.) 
Optimizations ============= diff --git a/Lib/test/test_tracemalloc.py b/Lib/test/test_tracemalloc.py index 635a9d3981605..c5ae4e6d653bf 100644 --- a/Lib/test/test_tracemalloc.py +++ b/Lib/test/test_tracemalloc.py @@ -246,6 +246,30 @@ def test_clear_traces(self): traceback2 = tracemalloc.get_object_traceback(obj) self.assertIsNone(traceback2) + def test_reset_peak(self): + # Python allocates some internals objects, so the test must tolerate + # a small difference between the expected size and the real usage + tracemalloc.clear_traces() + + # Example: allocate a large piece of memory, temporarily + large_sum = sum(list(range(100000))) + size1, peak1 = tracemalloc.get_traced_memory() + + # reset_peak() resets peak to traced memory: peak2 < peak1 + tracemalloc.reset_peak() + size2, peak2 = tracemalloc.get_traced_memory() + self.assertGreaterEqual(peak2, size2) + self.assertLess(peak2, peak1) + + # check that peak continue to be updated if new memory is allocated: + # peak3 > peak2 + obj_size = 1024 * 1024 + obj, obj_traceback = allocate_bytes(obj_size) + size3, peak3 = tracemalloc.get_traced_memory() + self.assertGreaterEqual(peak3, size3) + self.assertGreater(peak3, peak2) + self.assertGreaterEqual(peak3 - peak2, obj_size) + def test_is_tracing(self): tracemalloc.stop() self.assertFalse(tracemalloc.is_tracing()) diff --git a/Misc/ACKS b/Misc/ACKS index 6511383fa25d7..a505a3d784036 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -1863,6 +1863,7 @@ Alex Willmer David Wilson Geoff Wilson Greg V. 
Wilson +Huon Wilson J Derek Wilson Paul Winkler Jody Winston diff --git a/Misc/NEWS.d/next/Library/2020-05-15-13-40-15.bpo-40630.YXEX_M.rst b/Misc/NEWS.d/next/Library/2020-05-15-13-40-15.bpo-40630.YXEX_M.rst new file mode 100644 index 0000000000000..bb2e7452d3cfb --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-15-13-40-15.bpo-40630.YXEX_M.rst @@ -0,0 +1,2 @@ +Added :func:`tracemalloc.reset_peak` to set the peak size of traced memory +blocks to the current size, to measure the peak of specific pieces of code. diff --git a/Modules/_tracemalloc.c b/Modules/_tracemalloc.c index 4522d1afde908..567571657453e 100644 --- a/Modules/_tracemalloc.c +++ b/Modules/_tracemalloc.c @@ -1643,6 +1643,30 @@ _tracemalloc_get_traced_memory_impl(PyObject *module) return Py_BuildValue("nn", size, peak_size); } +/*[clinic input] +_tracemalloc.reset_peak + +Set the peak size of memory blocks traced by tracemalloc to the current size. + +Do nothing if the tracemalloc module is not tracing memory allocations. 
+ +[clinic start generated code]*/ + +static PyObject * +_tracemalloc_reset_peak_impl(PyObject *module) +/*[clinic end generated code: output=140c2870f691dbb2 input=18afd0635066e9ce]*/ +{ + if (!_Py_tracemalloc_config.tracing) { + Py_RETURN_NONE; + } + + TABLES_LOCK(); + tracemalloc_peak_traced_memory = tracemalloc_traced_memory; + TABLES_UNLOCK(); + + Py_RETURN_NONE; +} + static PyMethodDef module_methods[] = { _TRACEMALLOC_IS_TRACING_METHODDEF @@ -1654,6 +1678,7 @@ static PyMethodDef module_methods[] = { _TRACEMALLOC_GET_TRACEBACK_LIMIT_METHODDEF _TRACEMALLOC_GET_TRACEMALLOC_MEMORY_METHODDEF _TRACEMALLOC_GET_TRACED_MEMORY_METHODDEF + _TRACEMALLOC_RESET_PEAK_METHODDEF /* sentinel */ {NULL, NULL} }; diff --git a/Modules/clinic/_tracemalloc.c.h b/Modules/clinic/_tracemalloc.c.h index 68fafdc3833d2..049cacd832663 100644 --- a/Modules/clinic/_tracemalloc.c.h +++ b/Modules/clinic/_tracemalloc.c.h @@ -197,4 +197,24 @@ _tracemalloc_get_traced_memory(PyObject *module, PyObject *Py_UNUSED(ignored)) { return _tracemalloc_get_traced_memory_impl(module); } -/*[clinic end generated code: output=1bc96dc569706afa input=a9049054013a1b77]*/ + +PyDoc_STRVAR(_tracemalloc_reset_peak__doc__, +"reset_peak($module, /)\n" +"--\n" +"\n" +"Set the peak size of memory blocks traced by tracemalloc to the current size.\n" +"\n" +"Do nothing if the tracemalloc module is not tracing memory allocations."); + +#define _TRACEMALLOC_RESET_PEAK_METHODDEF \ + {"reset_peak", (PyCFunction)_tracemalloc_reset_peak, METH_NOARGS, _tracemalloc_reset_peak__doc__}, + +static PyObject * +_tracemalloc_reset_peak_impl(PyObject *module); + +static PyObject * +_tracemalloc_reset_peak(PyObject *module, PyObject *Py_UNUSED(ignored)) +{ + return _tracemalloc_reset_peak_impl(module); +} +/*[clinic end generated code: output=a130117b1af821da input=a9049054013a1b77]*/ From webhook-mailer at python.org Fri May 22 10:29:05 2020 From: webhook-mailer at python.org (Raymond Hettinger) Date: Fri, 22 May 2020 14:29:05 -0000 
Subject: [Python-checkins] Let the argument clinic do the type checking for heapq (GH-20284) Message-ID: https://github.com/python/cpython/commit/0226f3eba0673f55025114537cc8141fb5dcbcdf commit: 0226f3eba0673f55025114537cc8141fb5dcbcdf branch: master author: Raymond Hettinger committer: GitHub date: 2020-05-22T07:28:57-07:00 summary: Let the argument clinic do the type checking for heapq (GH-20284) files: M Modules/_heapqmodule.c M Modules/clinic/_heapqmodule.c.h diff --git a/Modules/_heapqmodule.c b/Modules/_heapqmodule.c index 4e85e046d385a..193478d79b456 100644 --- a/Modules/_heapqmodule.c +++ b/Modules/_heapqmodule.c @@ -113,7 +113,7 @@ siftup(PyListObject *heap, Py_ssize_t pos) /*[clinic input] _heapq.heappush - heap: object + heap: object(subclass_of='&PyList_Type') item: object / @@ -122,13 +122,8 @@ Push item onto heap, maintaining the heap invariant. static PyObject * _heapq_heappush_impl(PyObject *module, PyObject *heap, PyObject *item) -/*[clinic end generated code: output=912c094f47663935 input=7913545cb5118842]*/ +/*[clinic end generated code: output=912c094f47663935 input=7c69611f3698aceb]*/ { - if (!PyList_Check(heap)) { - PyErr_SetString(PyExc_TypeError, "heap argument must be a list"); - return NULL; - } - if (PyList_Append(heap, item)) return NULL; @@ -143,11 +138,6 @@ heappop_internal(PyObject *heap, int siftup_func(PyListObject *, Py_ssize_t)) PyObject *lastelt, *returnitem; Py_ssize_t n; - if (!PyList_Check(heap)) { - PyErr_SetString(PyExc_TypeError, "heap argument must be a list"); - return NULL; - } - /* raises IndexError if the heap is empty */ n = PyList_GET_SIZE(heap); if (n == 0) { @@ -177,15 +167,15 @@ heappop_internal(PyObject *heap, int siftup_func(PyListObject *, Py_ssize_t)) /*[clinic input] _heapq.heappop - heap: object + heap: object(subclass_of='&PyList_Type') / Pop the smallest item off the heap, maintaining the heap invariant. 
[clinic start generated code]*/ static PyObject * -_heapq_heappop(PyObject *module, PyObject *heap) -/*[clinic end generated code: output=e1bbbc9866bce179 input=9bd36317b806033d]*/ +_heapq_heappop_impl(PyObject *module, PyObject *heap) +/*[clinic end generated code: output=96dfe82d37d9af76 input=91487987a583c856]*/ { return heappop_internal(heap, siftup); } @@ -195,11 +185,6 @@ heapreplace_internal(PyObject *heap, PyObject *item, int siftup_func(PyListObjec { PyObject *returnitem; - if (!PyList_Check(heap)) { - PyErr_SetString(PyExc_TypeError, "heap argument must be a list"); - return NULL; - } - if (PyList_GET_SIZE(heap) == 0) { PyErr_SetString(PyExc_IndexError, "index out of range"); return NULL; @@ -219,7 +204,7 @@ heapreplace_internal(PyObject *heap, PyObject *item, int siftup_func(PyListObjec /*[clinic input] _heapq.heapreplace - heap: object + heap: object(subclass_of='&PyList_Type') item: object / @@ -236,7 +221,7 @@ this routine unless written as part of a conditional replacement: static PyObject * _heapq_heapreplace_impl(PyObject *module, PyObject *heap, PyObject *item) -/*[clinic end generated code: output=82ea55be8fbe24b4 input=e57ae8f4ecfc88e3]*/ +/*[clinic end generated code: output=82ea55be8fbe24b4 input=719202ac02ba10c8]*/ { return heapreplace_internal(heap, item, siftup); } @@ -244,7 +229,7 @@ _heapq_heapreplace_impl(PyObject *module, PyObject *heap, PyObject *item) /*[clinic input] _heapq.heappushpop - heap: object + heap: object(subclass_of='&PyList_Type') item: object / @@ -256,16 +241,11 @@ a separate call to heappop(). 
static PyObject * _heapq_heappushpop_impl(PyObject *module, PyObject *heap, PyObject *item) -/*[clinic end generated code: output=67231dc98ed5774f input=eb48c90ba77b2214]*/ +/*[clinic end generated code: output=67231dc98ed5774f input=5dc701f1eb4a4aa7]*/ { PyObject *returnitem; int cmp; - if (!PyList_Check(heap)) { - PyErr_SetString(PyExc_TypeError, "heap argument must be a list"); - return NULL; - } - if (PyList_GET_SIZE(heap) == 0) { Py_INCREF(item); return item; @@ -367,11 +347,6 @@ heapify_internal(PyObject *heap, int siftup_func(PyListObject *, Py_ssize_t)) { Py_ssize_t i, n; - if (!PyList_Check(heap)) { - PyErr_SetString(PyExc_TypeError, "heap argument must be a list"); - return NULL; - } - /* For heaps likely to be bigger than L1 cache, we use the cache friendly heapify function. For smaller heaps that fit entirely in cache, we prefer the simpler algorithm with less branching. @@ -396,15 +371,15 @@ heapify_internal(PyObject *heap, int siftup_func(PyListObject *, Py_ssize_t)) /*[clinic input] _heapq.heapify - heap: object + heap: object(subclass_of='&PyList_Type') / Transform list into a heap, in-place, in O(len(heap)) time. [clinic start generated code]*/ static PyObject * -_heapq_heapify(PyObject *module, PyObject *heap) -/*[clinic end generated code: output=11483f23627c4616 input=872c87504b8de970]*/ +_heapq_heapify_impl(PyObject *module, PyObject *heap) +/*[clinic end generated code: output=e63a636fcf83d6d0 input=53bb7a2166febb73]*/ { return heapify_internal(heap, siftup); } @@ -508,15 +483,15 @@ siftup_max(PyListObject *heap, Py_ssize_t pos) /*[clinic input] _heapq._heappop_max - heap: object + heap: object(subclass_of='&PyList_Type') / Maxheap variant of heappop. 
[clinic start generated code]*/ static PyObject * -_heapq__heappop_max(PyObject *module, PyObject *heap) -/*[clinic end generated code: output=acd30acf6384b13c input=62ede3ba9117f541]*/ +_heapq__heappop_max_impl(PyObject *module, PyObject *heap) +/*[clinic end generated code: output=9e77aadd4e6a8760 input=362c06e1c7484793]*/ { return heappop_internal(heap, siftup_max); } @@ -524,7 +499,7 @@ _heapq__heappop_max(PyObject *module, PyObject *heap) /*[clinic input] _heapq._heapreplace_max - heap: object + heap: object(subclass_of='&PyList_Type') item: object / @@ -534,7 +509,7 @@ Maxheap variant of heapreplace. static PyObject * _heapq__heapreplace_max_impl(PyObject *module, PyObject *heap, PyObject *item) -/*[clinic end generated code: output=8ad7545e4a5e8adb input=6d8f25131e0f0e5f]*/ +/*[clinic end generated code: output=8ad7545e4a5e8adb input=f2dd27cbadb948d7]*/ { return heapreplace_internal(heap, item, siftup_max); } @@ -542,15 +517,15 @@ _heapq__heapreplace_max_impl(PyObject *module, PyObject *heap, /*[clinic input] _heapq._heapify_max - heap: object + heap: object(subclass_of='&PyList_Type') / Maxheap variant of heapify. 
[clinic start generated code]*/ static PyObject * -_heapq__heapify_max(PyObject *module, PyObject *heap) -/*[clinic end generated code: output=1c6bb6b60d6a2133 input=cdfcc6835b14110d]*/ +_heapq__heapify_max_impl(PyObject *module, PyObject *heap) +/*[clinic end generated code: output=2cb028beb4a8b65e input=c1f765ee69f124b8]*/ { return heapify_internal(heap, siftup_max); } diff --git a/Modules/clinic/_heapqmodule.c.h b/Modules/clinic/_heapqmodule.c.h index 55403706ba05c..a894315a766c2 100644 --- a/Modules/clinic/_heapqmodule.c.h +++ b/Modules/clinic/_heapqmodule.c.h @@ -24,6 +24,10 @@ _heapq_heappush(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("heappush", nargs, 2, 2)) { goto exit; } + if (!PyList_Check(args[0])) { + _PyArg_BadArgument("heappush", "argument 1", "list", args[0]); + goto exit; + } heap = args[0]; item = args[1]; return_value = _heapq_heappush_impl(module, heap, item); @@ -41,6 +45,26 @@ PyDoc_STRVAR(_heapq_heappop__doc__, #define _HEAPQ_HEAPPOP_METHODDEF \ {"heappop", (PyCFunction)_heapq_heappop, METH_O, _heapq_heappop__doc__}, +static PyObject * +_heapq_heappop_impl(PyObject *module, PyObject *heap); + +static PyObject * +_heapq_heappop(PyObject *module, PyObject *arg) +{ + PyObject *return_value = NULL; + PyObject *heap; + + if (!PyList_Check(arg)) { + _PyArg_BadArgument("heappop", "argument", "list", arg); + goto exit; + } + heap = arg; + return_value = _heapq_heappop_impl(module, heap); + +exit: + return return_value; +} + PyDoc_STRVAR(_heapq_heapreplace__doc__, "heapreplace($module, heap, item, /)\n" "--\n" @@ -71,6 +95,10 @@ _heapq_heapreplace(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("heapreplace", nargs, 2, 2)) { goto exit; } + if (!PyList_Check(args[0])) { + _PyArg_BadArgument("heapreplace", "argument 1", "list", args[0]); + goto exit; + } heap = args[0]; item = args[1]; return_value = _heapq_heapreplace_impl(module, heap, item); @@ -104,6 +132,10 @@ 
_heapq_heappushpop(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("heappushpop", nargs, 2, 2)) { goto exit; } + if (!PyList_Check(args[0])) { + _PyArg_BadArgument("heappushpop", "argument 1", "list", args[0]); + goto exit; + } heap = args[0]; item = args[1]; return_value = _heapq_heappushpop_impl(module, heap, item); @@ -121,6 +153,26 @@ PyDoc_STRVAR(_heapq_heapify__doc__, #define _HEAPQ_HEAPIFY_METHODDEF \ {"heapify", (PyCFunction)_heapq_heapify, METH_O, _heapq_heapify__doc__}, +static PyObject * +_heapq_heapify_impl(PyObject *module, PyObject *heap); + +static PyObject * +_heapq_heapify(PyObject *module, PyObject *arg) +{ + PyObject *return_value = NULL; + PyObject *heap; + + if (!PyList_Check(arg)) { + _PyArg_BadArgument("heapify", "argument", "list", arg); + goto exit; + } + heap = arg; + return_value = _heapq_heapify_impl(module, heap); + +exit: + return return_value; +} + PyDoc_STRVAR(_heapq__heappop_max__doc__, "_heappop_max($module, heap, /)\n" "--\n" @@ -130,6 +182,26 @@ PyDoc_STRVAR(_heapq__heappop_max__doc__, #define _HEAPQ__HEAPPOP_MAX_METHODDEF \ {"_heappop_max", (PyCFunction)_heapq__heappop_max, METH_O, _heapq__heappop_max__doc__}, +static PyObject * +_heapq__heappop_max_impl(PyObject *module, PyObject *heap); + +static PyObject * +_heapq__heappop_max(PyObject *module, PyObject *arg) +{ + PyObject *return_value = NULL; + PyObject *heap; + + if (!PyList_Check(arg)) { + _PyArg_BadArgument("_heappop_max", "argument", "list", arg); + goto exit; + } + heap = arg; + return_value = _heapq__heappop_max_impl(module, heap); + +exit: + return return_value; +} + PyDoc_STRVAR(_heapq__heapreplace_max__doc__, "_heapreplace_max($module, heap, item, /)\n" "--\n" @@ -153,6 +225,10 @@ _heapq__heapreplace_max(PyObject *module, PyObject *const *args, Py_ssize_t narg if (!_PyArg_CheckPositional("_heapreplace_max", nargs, 2, 2)) { goto exit; } + if (!PyList_Check(args[0])) { + _PyArg_BadArgument("_heapreplace_max", "argument 1", 
"list", args[0]); + goto exit; + } heap = args[0]; item = args[1]; return_value = _heapq__heapreplace_max_impl(module, heap, item); @@ -169,4 +245,24 @@ PyDoc_STRVAR(_heapq__heapify_max__doc__, #define _HEAPQ__HEAPIFY_MAX_METHODDEF \ {"_heapify_max", (PyCFunction)_heapq__heapify_max, METH_O, _heapq__heapify_max__doc__}, -/*[clinic end generated code: output=37ef2a3319971c8d input=a9049054013a1b77]*/ + +static PyObject * +_heapq__heapify_max_impl(PyObject *module, PyObject *heap); + +static PyObject * +_heapq__heapify_max(PyObject *module, PyObject *arg) +{ + PyObject *return_value = NULL; + PyObject *heap; + + if (!PyList_Check(arg)) { + _PyArg_BadArgument("_heapify_max", "argument", "list", arg); + goto exit; + } + heap = arg; + return_value = _heapq__heapify_max_impl(module, heap); + +exit: + return return_value; +} +/*[clinic end generated code: output=9975cf51762878d5 input=a9049054013a1b77]*/ From webhook-mailer at python.org Fri May 22 11:19:23 2020 From: webhook-mailer at python.org (Florian Dahlitz) Date: Fri, 22 May 2020 15:19:23 -0000 Subject: [Python-checkins] bpo-40730: Remove redundant 'to' (GH-20316) Message-ID: https://github.com/python/cpython/commit/30d5a7364db9e65ccabbdce2c20b84fe2fb233fb commit: 30d5a7364db9e65ccabbdce2c20b84fe2fb233fb branch: master author: Florian Dahlitz committer: GitHub date: 2020-05-22T08:19:18-07:00 summary: bpo-40730: Remove redundant 'to' (GH-20316) @ericvsmith I guess it is correct to merge it into master and not 3.9 directly? Automerge-Triggered-By: @ericvsmith files: M Doc/whatsnew/3.9.rst diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index 5fd051255da17..ebb24ebb026fc 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -605,7 +605,7 @@ Optimizations sums = [s for s in [0] for x in data for s in [s + x]] - Unlike to the ``:=`` operator this idiom does not leak a variable to the + Unlike the ``:=`` operator this idiom does not leak a variable to the outer scope. 
(Contributed by Serhiy Storchaka in :issue:`32856`.) From webhook-mailer at python.org Fri May 22 12:11:15 2020 From: webhook-mailer at python.org (Ammar Askar) Date: Fri, 22 May 2020 16:11:15 -0000 Subject: [Python-checkins] bpo-40705: Fix use-after-free in _zoneinfo's module_free (GH-20280) Message-ID: https://github.com/python/cpython/commit/06a1b8915d6674e40f0dccc422ca2c06212392d8 commit: 06a1b8915d6674e40f0dccc422ca2c06212392d8 branch: master author: Ammar Askar committer: GitHub date: 2020-05-22T12:10:55-04:00 summary: bpo-40705: Fix use-after-free in _zoneinfo's module_free (GH-20280) files: M Modules/_zoneinfo.c diff --git a/Modules/_zoneinfo.c b/Modules/_zoneinfo.c index d7e7157657643..d852c763e2e3d 100644 --- a/Modules/_zoneinfo.c +++ b/Modules/_zoneinfo.c @@ -2490,6 +2490,7 @@ new_weak_cache() static int initialize_caches() { + // TODO: Move to a PyModule_GetState / PEP 573 based caching system. if (TIMEDELTA_CACHE == NULL) { TIMEDELTA_CACHE = PyDict_New(); } @@ -2603,14 +2604,16 @@ module_free() xdecref_ttinfo(&NO_TTINFO); - Py_XDECREF(TIMEDELTA_CACHE); - if (!Py_REFCNT(TIMEDELTA_CACHE)) { - TIMEDELTA_CACHE = NULL; + if (TIMEDELTA_CACHE != NULL && Py_REFCNT(TIMEDELTA_CACHE) > 1) { + Py_DECREF(TIMEDELTA_CACHE); + } else { + Py_CLEAR(TIMEDELTA_CACHE); } - Py_XDECREF(ZONEINFO_WEAK_CACHE); - if (!Py_REFCNT(ZONEINFO_WEAK_CACHE)) { - ZONEINFO_WEAK_CACHE = NULL; + if (ZONEINFO_WEAK_CACHE != NULL && Py_REFCNT(ZONEINFO_WEAK_CACHE) > 1) { + Py_DECREF(ZONEINFO_WEAK_CACHE); + } else { + Py_CLEAR(ZONEINFO_WEAK_CACHE); } strong_cache_free(ZONEINFO_STRONG_CACHE); From webhook-mailer at python.org Fri May 22 14:04:54 2020 From: webhook-mailer at python.org (Christian Heimes) Date: Fri, 22 May 2020 18:04:54 -0000 Subject: [Python-checkins] bpo-9216: hashlib usedforsecurity fixes (GH-20258) Message-ID: https://github.com/python/cpython/commit/909b5714e1303357868bc5e281c1cf508d5d5a17 commit: 909b5714e1303357868bc5e281c1cf508d5d5a17 branch: master author: Christian Heimes 
committer: GitHub date: 2020-05-22T11:04:33-07:00 summary: bpo-9216: hashlib usedforsecurity fixes (GH-20258) func:`hashlib.new` passed ``usedforsecurity`` to OpenSSL EVP constructor ``_hashlib.new()``. test_hashlib and test_smtplib handle strict security policy better. Signed-off-by: Christian Heimes Automerge-Triggered-By: @tiran files: A Misc/NEWS.d/next/Library/2020-05-20-12-53-20.bpo-9216.ps7Yf1.rst M Lib/hashlib.py M Lib/test/test_hashlib.py M Lib/test/test_smtplib.py M Lib/test/test_tools/test_md5sum.py M Lib/test/test_urllib2_localnet.py diff --git a/Lib/hashlib.py b/Lib/hashlib.py index 0f81de094ca6e..8d119a4225db9 100644 --- a/Lib/hashlib.py +++ b/Lib/hashlib.py @@ -154,7 +154,7 @@ def __hash_new(name, data=b'', **kwargs): # salt, personal, tree hashing or SSE. return __get_builtin_constructor(name)(data, **kwargs) try: - return _hashlib.new(name, data) + return _hashlib.new(name, data, **kwargs) except ValueError: # If the _hashlib module (OpenSSL) doesn't support the named # hash, try using our builtin implementations. 
diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py index b901468db38b1..d40acd5889913 100644 --- a/Lib/test/test_hashlib.py +++ b/Lib/test/test_hashlib.py @@ -13,6 +13,7 @@ import itertools import os import sys +import sysconfig import threading import unittest import warnings @@ -26,11 +27,20 @@ c_hashlib = import_fresh_module('hashlib', fresh=['_hashlib']) py_hashlib = import_fresh_module('hashlib', blocked=['_hashlib']) +builtin_hashes = sysconfig.get_config_var("PY_BUILTIN_HASHLIB_HASHES") +if builtin_hashes is None: + builtin_hashes = {'md5', 'sha1', 'sha256', 'sha512', 'sha3', 'blake2'} +else: + builtin_hashes = { + m.strip() for m in builtin_hashes.strip('"').lower().split(",") + } + try: - from _hashlib import HASH, HASHXOF + from _hashlib import HASH, HASHXOF, openssl_md_meth_names except ImportError: HASH = None HASHXOF = None + openssl_md_meth_names = frozenset() try: import _blake2 @@ -175,10 +185,17 @@ def hash_constructors(self): constructors = self.constructors_to_test.values() return itertools.chain.from_iterable(constructors) + @property + def is_fips_mode(self): + if hasattr(self._hashlib, "get_fips_mode"): + return self._hashlib.get_fips_mode() + else: + return None + def test_hash_array(self): a = array.array("b", range(10)) for cons in self.hash_constructors: - c = cons(a) + c = cons(a, usedforsecurity=False) if c.name in self.shakes: c.hexdigest(16) else: @@ -193,14 +210,26 @@ def test_algorithms_available(self): self.assertTrue(set(hashlib.algorithms_guaranteed). 
issubset(hashlib.algorithms_available)) - def test_usedforsecurity(self): + def test_usedforsecurity_true(self): + hashlib.new("sha256", usedforsecurity=True) + if self.is_fips_mode: + self.skipTest("skip in FIPS mode") for cons in self.hash_constructors: cons(usedforsecurity=True) - cons(usedforsecurity=False) cons(b'', usedforsecurity=True) - cons(b'', usedforsecurity=False) - hashlib.new("sha256", usedforsecurity=True) + hashlib.new("md5", usedforsecurity=True) + hashlib.md5(usedforsecurity=True) + if self._hashlib is not None: + self._hashlib.new("md5", usedforsecurity=True) + self._hashlib.openssl_md5(usedforsecurity=True) + + def test_usedforsecurity_false(self): hashlib.new("sha256", usedforsecurity=False) + for cons in self.hash_constructors: + cons(usedforsecurity=False) + cons(b'', usedforsecurity=False) + hashlib.new("md5", usedforsecurity=False) + hashlib.md5(usedforsecurity=False) if self._hashlib is not None: self._hashlib.new("md5", usedforsecurity=False) self._hashlib.openssl_md5(usedforsecurity=False) @@ -240,7 +269,7 @@ def test_get_builtin_constructor(self): def test_hexdigest(self): for cons in self.hash_constructors: - h = cons() + h = cons(usedforsecurity=False) if h.name in self.shakes: self.assertIsInstance(h.digest(16), bytes) self.assertEqual(hexstr(h.digest(16)), h.hexdigest(16)) @@ -252,7 +281,7 @@ def test_digest_length_overflow(self): # See issue #34922 large_sizes = (2**29, 2**32-10, 2**32+10, 2**61, 2**64-10, 2**64+10) for cons in self.hash_constructors: - h = cons() + h = cons(usedforsecurity=False) if h.name not in self.shakes: continue if HASH is not None and isinstance(h, HASH): @@ -266,13 +295,16 @@ def test_digest_length_overflow(self): def test_name_attribute(self): for cons in self.hash_constructors: - h = cons() + h = cons(usedforsecurity=False) self.assertIsInstance(h.name, str) if h.name in self.supported_hash_names: self.assertIn(h.name, self.supported_hash_names) else: self.assertNotIn(h.name, self.supported_hash_names) 
- self.assertEqual(h.name, hashlib.new(h.name).name) + self.assertEqual( + h.name, + hashlib.new(h.name, usedforsecurity=False).name + ) def test_large_update(self): aas = b'a' * 128 @@ -281,7 +313,7 @@ def test_large_update(self): dees = b'd' * 2048 # HASHLIB_GIL_MINSIZE for cons in self.hash_constructors: - m1 = cons() + m1 = cons(usedforsecurity=False) m1.update(aas) m1.update(bees) m1.update(cees) @@ -291,15 +323,15 @@ def test_large_update(self): else: args = () - m2 = cons() + m2 = cons(usedforsecurity=False) m2.update(aas + bees + cees + dees) self.assertEqual(m1.digest(*args), m2.digest(*args)) - m3 = cons(aas + bees + cees + dees) + m3 = cons(aas + bees + cees + dees, usedforsecurity=False) self.assertEqual(m1.digest(*args), m3.digest(*args)) # verify copy() doesn't touch original - m4 = cons(aas + bees + cees) + m4 = cons(aas + bees + cees, usedforsecurity=False) m4_digest = m4.digest(*args) m4_copy = m4.copy() m4_copy.update(dees) @@ -359,7 +391,7 @@ def check_blocksize_name(self, name, block_size=0, digest_size=0, digest_length=None): constructors = self.constructors_to_test[name] for hash_object_constructor in constructors: - m = hash_object_constructor() + m = hash_object_constructor(usedforsecurity=False) self.assertEqual(m.block_size, block_size) self.assertEqual(m.digest_size, digest_size) if digest_length: @@ -418,15 +450,24 @@ def test_blocksize_name_blake2(self): self.check_blocksize_name('blake2s', 64, 32) def test_case_md5_0(self): - self.check('md5', b'', 'd41d8cd98f00b204e9800998ecf8427e') + self.check( + 'md5', b'', 'd41d8cd98f00b204e9800998ecf8427e', + usedforsecurity=False + ) def test_case_md5_1(self): - self.check('md5', b'abc', '900150983cd24fb0d6963f7d28e17f72') + self.check( + 'md5', b'abc', '900150983cd24fb0d6963f7d28e17f72', + usedforsecurity=False + ) def test_case_md5_2(self): - self.check('md5', - b'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', - 'd174ab98d277d9f5a5611c2c9f419d9f') + self.check( + 'md5', + 
b'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', + 'd174ab98d277d9f5a5611c2c9f419d9f', + usedforsecurity=False + ) @unittest.skipIf(sys.maxsize < _4G + 5, 'test cannot run on 32-bit systems') @bigmemtest(size=_4G + 5, memuse=1, dry_run=False) @@ -806,22 +847,28 @@ def test_gil(self): gil_minsize = 2048 for cons in self.hash_constructors: - m = cons() + m = cons(usedforsecurity=False) m.update(b'1') m.update(b'#' * gil_minsize) m.update(b'1') - m = cons(b'x' * gil_minsize) + m = cons(b'x' * gil_minsize, usedforsecurity=False) m.update(b'1') - m = hashlib.md5() + m = hashlib.sha256() m.update(b'1') m.update(b'#' * gil_minsize) m.update(b'1') - self.assertEqual(m.hexdigest(), 'cb1e1a2cbc80be75e19935d621fb9b21') + self.assertEqual( + m.hexdigest(), + '1cfceca95989f51f658e3f3ffe7f1cd43726c9e088c13ee10b46f57cef135b94' + ) - m = hashlib.md5(b'x' * gil_minsize) - self.assertEqual(m.hexdigest(), 'cfb767f225d58469c5de3632a8803958') + m = hashlib.sha256(b'1' + b'#' * gil_minsize + b'1') + self.assertEqual( + m.hexdigest(), + '1cfceca95989f51f658e3f3ffe7f1cd43726c9e088c13ee10b46f57cef135b94' + ) @support.reap_threads def test_threaded_hashing(self): @@ -859,10 +906,10 @@ def hash_in_chunks(chunk_size): self.assertEqual(expected_hash, hasher.hexdigest()) - @unittest.skipUnless(hasattr(c_hashlib, 'get_fips_mode'), - 'need _hashlib.get_fips_mode') def test_get_fips_mode(self): - self.assertIsInstance(c_hashlib.get_fips_mode(), int) + fips_mode = self.is_fips_mode + if fips_mode is not None: + self.assertIsInstance(fips_mode, int) @unittest.skipUnless(HASH is not None, 'need _hashlib') def test_internal_types(self): @@ -934,8 +981,10 @@ class KDFTests(unittest.TestCase): (bytes.fromhex('9d9e9c4cd21fe4be24d5b8244c759665'), None),], } - def _test_pbkdf2_hmac(self, pbkdf2): + def _test_pbkdf2_hmac(self, pbkdf2, supported): for digest_name, results in self.pbkdf2_results.items(): + if digest_name not in supported: + continue for i, vector in 
enumerate(self.pbkdf2_test_vectors): password, salt, rounds, dklen = vector expected, overwrite_dklen = results[i] @@ -946,6 +995,7 @@ def _test_pbkdf2_hmac(self, pbkdf2): (digest_name, password, salt, rounds, dklen)) out = pbkdf2(digest_name, memoryview(password), memoryview(salt), rounds, dklen) + self.assertEqual(out, expected) out = pbkdf2(digest_name, bytearray(password), bytearray(salt), rounds, dklen) self.assertEqual(out, expected) @@ -967,12 +1017,12 @@ def _test_pbkdf2_hmac(self, pbkdf2): self.assertEqual(out, self.pbkdf2_results['sha1'][0][0]) def test_pbkdf2_hmac_py(self): - self._test_pbkdf2_hmac(py_hashlib.pbkdf2_hmac) + self._test_pbkdf2_hmac(py_hashlib.pbkdf2_hmac, builtin_hashes) @unittest.skipUnless(hasattr(c_hashlib, 'pbkdf2_hmac'), ' test requires OpenSSL > 1.0') def test_pbkdf2_hmac_c(self): - self._test_pbkdf2_hmac(c_hashlib.pbkdf2_hmac) + self._test_pbkdf2_hmac(c_hashlib.pbkdf2_hmac, openssl_md_meth_names) @unittest.skipUnless(hasattr(c_hashlib, 'scrypt'), diff --git a/Lib/test/test_smtplib.py b/Lib/test/test_smtplib.py index c1bd2e291255b..576299900318d 100644 --- a/Lib/test/test_smtplib.py +++ b/Lib/test/test_smtplib.py @@ -1067,6 +1067,7 @@ def testAUTH_CRAM_MD5(self): self.assertEqual(resp, (235, b'Authentication Succeeded')) smtp.close() + @hashlib_helper.requires_hashdigest('md5') def testAUTH_multiple(self): # Test that multiple authentication methods are tried. 
self.serv.add_feature("AUTH BOGUS PLAIN LOGIN CRAM-MD5") diff --git a/Lib/test/test_tools/test_md5sum.py b/Lib/test/test_tools/test_md5sum.py index fb565b73778f7..321bc4bb36282 100644 --- a/Lib/test/test_tools/test_md5sum.py +++ b/Lib/test/test_tools/test_md5sum.py @@ -3,12 +3,14 @@ import os import unittest from test import support +from test.support import hashlib_helper from test.support.script_helper import assert_python_ok, assert_python_failure from test.test_tools import scriptsdir, skip_if_missing skip_if_missing() + at hashlib_helper.requires_hashdigest('md5') class MD5SumTests(unittest.TestCase): @classmethod def setUpClass(cls): diff --git a/Lib/test/test_urllib2_localnet.py b/Lib/test/test_urllib2_localnet.py index 421b9f7de2e21..ed426b05a7198 100644 --- a/Lib/test/test_urllib2_localnet.py +++ b/Lib/test/test_urllib2_localnet.py @@ -316,6 +316,7 @@ def test_basic_auth_httperror(self): self.assertRaises(urllib.error.HTTPError, urllib.request.urlopen, self.server_url) + at hashlib_helper.requires_hashdigest("md5") class ProxyAuthTests(unittest.TestCase): URL = "http://localhost" @@ -323,7 +324,6 @@ class ProxyAuthTests(unittest.TestCase): PASSWD = "test123" REALM = "TestRealm" - @hashlib_helper.requires_hashdigest("md5") def setUp(self): super(ProxyAuthTests, self).setUp() # Ignore proxy bypass settings in the environment. diff --git a/Misc/NEWS.d/next/Library/2020-05-20-12-53-20.bpo-9216.ps7Yf1.rst b/Misc/NEWS.d/next/Library/2020-05-20-12-53-20.bpo-9216.ps7Yf1.rst new file mode 100644 index 0000000000000..37542e8caffd4 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-20-12-53-20.bpo-9216.ps7Yf1.rst @@ -0,0 +1,3 @@ +func:`hashlib.new` passed ``usedforsecurity`` to OpenSSL EVP constructor +``_hashlib.new()``. test_hashlib and test_smtplib handle strict security +policy better. 
From webhook-mailer at python.org Fri May 22 16:33:42 2020 From: webhook-mailer at python.org (Chris Jerdonek) Date: Fri, 22 May 2020 20:33:42 -0000 Subject: [Python-checkins] bpo-40696: Fix a hang that can arise after gen.throw() (GH-20287) Message-ID: https://github.com/python/cpython/commit/7c30d12bd5359b0f66c4fbc98aa055398bcc8a7e commit: 7c30d12bd5359b0f66c4fbc98aa055398bcc8a7e branch: master author: Chris Jerdonek committer: GitHub date: 2020-05-22T13:33:27-07:00 summary: bpo-40696: Fix a hang that can arise after gen.throw() (GH-20287) This updates _PyErr_ChainStackItem() to use _PyErr_SetObject() instead of _PyErr_ChainExceptions(). This prevents a hang in certain circumstances because _PyErr_SetObject() performs checks to prevent cycles in the exception context chain while _PyErr_ChainExceptions() doesn't. files: A Misc/NEWS.d/next/Core and Builtins/2020-05-21-01-54-00.bpo-40696.u3n8Wx.rst M Include/internal/pycore_pyerrors.h M Lib/test/test_asyncio/test_tasks.py M Lib/test/test_generators.py M Modules/_asynciomodule.c M Objects/genobject.c M Python/errors.c diff --git a/Include/internal/pycore_pyerrors.h b/Include/internal/pycore_pyerrors.h index 3290a37051e0f..2cf1160afc014 100644 --- a/Include/internal/pycore_pyerrors.h +++ b/Include/internal/pycore_pyerrors.h @@ -51,7 +51,7 @@ PyAPI_FUNC(void) _PyErr_SetObject( PyObject *value); PyAPI_FUNC(void) _PyErr_ChainStackItem( - _PyErr_StackItem *exc_state); + _PyErr_StackItem *exc_info); PyAPI_FUNC(void) _PyErr_Clear(PyThreadState *tstate); diff --git a/Lib/test/test_asyncio/test_tasks.py b/Lib/test/test_asyncio/test_tasks.py index 63968e2a17894..3734013fad989 100644 --- a/Lib/test/test_asyncio/test_tasks.py +++ b/Lib/test/test_asyncio/test_tasks.py @@ -536,9 +536,42 @@ async def run(): self.assertEqual((type(chained), chained.args), (KeyError, (3,))) - task = self.new_task(loop, run()) - loop.run_until_complete(task) - loop.close() + try: + task = self.new_task(loop, run()) + loop.run_until_complete(task) + 
finally: + loop.close() + + def test_exception_chaining_after_await_with_context_cycle(self): + # Check trying to create an exception context cycle: + # https://bugs.python.org/issue40696 + has_cycle = None + loop = asyncio.new_event_loop() + self.set_event_loop(loop) + + async def process_exc(exc): + raise exc + + async def run(): + nonlocal has_cycle + try: + raise KeyError('a') + except Exception as exc: + task = self.new_task(loop, process_exc(exc)) + try: + await task + except BaseException as exc: + has_cycle = (exc is exc.__context__) + # Prevent a hang if has_cycle is True. + exc.__context__ = None + + try: + task = self.new_task(loop, run()) + loop.run_until_complete(task) + finally: + loop.close() + # This also distinguishes from the initial has_cycle=None. + self.assertEqual(has_cycle, False) def test_cancel(self): diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py index 87cc2dfc8c679..bf482213c178a 100644 --- a/Lib/test/test_generators.py +++ b/Lib/test/test_generators.py @@ -371,6 +371,32 @@ def g(): context = cm.exception.__context__ self.assertEqual((type(context), context.args), (KeyError, ('a',))) + def test_exception_context_with_yield_from_with_context_cycle(self): + # Check trying to create an exception context cycle: + # https://bugs.python.org/issue40696 + has_cycle = None + + def f(): + yield + + def g(exc): + nonlocal has_cycle + try: + raise exc + except Exception: + try: + yield from f() + except Exception as exc: + has_cycle = (exc is exc.__context__) + yield + + exc = KeyError('a') + gen = g(exc) + gen.send(None) + gen.throw(exc) + # This also distinguishes from the initial has_cycle=None. 
+ self.assertEqual(has_cycle, False) + def test_throw_after_none_exc_type(self): def g(): try: diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-21-01-54-00.bpo-40696.u3n8Wx.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-21-01-54-00.bpo-40696.u3n8Wx.rst new file mode 100644 index 0000000000000..f99bdea2e3177 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-21-01-54-00.bpo-40696.u3n8Wx.rst @@ -0,0 +1,2 @@ +Fix a hang that can arise after :meth:`generator.throw` due to a cycle +in the exception context chain. diff --git a/Modules/_asynciomodule.c b/Modules/_asynciomodule.c index 1b6a579682430..0608c40f6c339 100644 --- a/Modules/_asynciomodule.c +++ b/Modules/_asynciomodule.c @@ -612,19 +612,20 @@ create_cancelled_error(PyObject *msg) } static void -set_cancelled_error(PyObject *msg) +future_set_cancelled_error(FutureObj *fut) { - PyObject *exc = create_cancelled_error(msg); + PyObject *exc = create_cancelled_error(fut->fut_cancel_msg); PyErr_SetObject(asyncio_CancelledError, exc); Py_DECREF(exc); + + _PyErr_ChainStackItem(&fut->fut_cancelled_exc_state); } static int future_get_result(FutureObj *fut, PyObject **result) { if (fut->fut_state == STATE_CANCELLED) { - set_cancelled_error(fut->fut_cancel_msg); - _PyErr_ChainStackItem(&fut->fut_cancelled_exc_state); + future_set_cancelled_error(fut); return -1; } @@ -866,8 +867,7 @@ _asyncio_Future_exception_impl(FutureObj *self) } if (self->fut_state == STATE_CANCELLED) { - set_cancelled_error(self->fut_cancel_msg); - _PyErr_ChainStackItem(&self->fut_cancelled_exc_state); + future_set_cancelled_error(self); return NULL; } diff --git a/Objects/genobject.c b/Objects/genobject.c index 271720bdf8b4c..09efbab69a7d3 100644 --- a/Objects/genobject.c +++ b/Objects/genobject.c @@ -203,13 +203,15 @@ gen_send_ex(PyGenObject *gen, PyObject *arg, int exc, int closing) assert(f->f_back == NULL); f->f_back = tstate->frame; - if (exc) { - _PyErr_ChainStackItem(&gen->gi_exc_state); - } - gen->gi_running = 1; 
gen->gi_exc_state.previous_item = tstate->exc_info; tstate->exc_info = &gen->gi_exc_state; + + if (exc) { + assert(_PyErr_Occurred(tstate)); + _PyErr_ChainStackItem(NULL); + } + result = _PyEval_EvalFrame(tstate, f, exc); tstate->exc_info = gen->gi_exc_state.previous_item; gen->gi_exc_state.previous_item = NULL; diff --git a/Python/errors.c b/Python/errors.c index 3b42c1120b8d0..70365aaca585b 100644 --- a/Python/errors.c +++ b/Python/errors.c @@ -477,7 +477,9 @@ PyErr_SetExcInfo(PyObject *p_type, PyObject *p_value, PyObject *p_traceback) /* Like PyErr_Restore(), but if an exception is already set, set the context associated with it. - */ + + The caller is responsible for ensuring that this call won't create + any cycles in the exception context chain. */ void _PyErr_ChainExceptions(PyObject *exc, PyObject *val, PyObject *tb) { @@ -512,18 +514,60 @@ _PyErr_ChainExceptions(PyObject *exc, PyObject *val, PyObject *tb) } } +/* Set the currently set exception's context to the given exception. + + If the provided exc_info is NULL, then the current Python thread state's + exc_info will be used for the context instead. + + This function can only be called when _PyErr_Occurred() is true. + Also, this function won't create any cycles in the exception context + chain to the extent that _PyErr_SetObject ensures this. 
*/ void -_PyErr_ChainStackItem(_PyErr_StackItem *exc_state) +_PyErr_ChainStackItem(_PyErr_StackItem *exc_info) { - if (exc_state->exc_type == NULL || exc_state->exc_type == Py_None) { + PyThreadState *tstate = _PyThreadState_GET(); + assert(_PyErr_Occurred(tstate)); + + int exc_info_given; + if (exc_info == NULL) { + exc_info_given = 0; + exc_info = tstate->exc_info; + } else { + exc_info_given = 1; + } + if (exc_info->exc_type == NULL || exc_info->exc_type == Py_None) { return; } - Py_INCREF(exc_state->exc_type); - Py_XINCREF(exc_state->exc_value); - Py_XINCREF(exc_state->exc_traceback); - _PyErr_ChainExceptions(exc_state->exc_type, - exc_state->exc_value, - exc_state->exc_traceback); + + _PyErr_StackItem *saved_exc_info; + if (exc_info_given) { + /* Temporarily set the thread state's exc_info since this is what + _PyErr_SetObject uses for implicit exception chaining. */ + saved_exc_info = tstate->exc_info; + tstate->exc_info = exc_info; + } + + PyObject *exc, *val, *tb; + _PyErr_Fetch(tstate, &exc, &val, &tb); + + PyObject *exc2, *val2, *tb2; + exc2 = exc_info->exc_type; + val2 = exc_info->exc_value; + tb2 = exc_info->exc_traceback; + _PyErr_NormalizeException(tstate, &exc2, &val2, &tb2); + if (tb2 != NULL) { + PyException_SetTraceback(val2, tb2); + } + + /* _PyErr_SetObject sets the context from PyThreadState. 
*/ + _PyErr_SetObject(tstate, exc, val); + Py_DECREF(exc); // since _PyErr_Occurred was true + Py_XDECREF(val); + Py_XDECREF(tb); + + if (exc_info_given) { + tstate->exc_info = saved_exc_info; + } } static PyObject * From webhook-mailer at python.org Fri May 22 16:40:26 2020 From: webhook-mailer at python.org (Dennis Sweeney) Date: Fri, 22 May 2020 20:40:26 -0000 Subject: [Python-checkins] bpo-40679: Use the function's qualname in certain TypeErrors (GH-20236) Message-ID: https://github.com/python/cpython/commit/b5cc2089cc354469f12eabc7ba54280e85fdd6dc commit: b5cc2089cc354469f12eabc7ba54280e85fdd6dc branch: master author: Dennis Sweeney <36520290+sweeneyde at users.noreply.github.com> committer: GitHub date: 2020-05-22T13:40:17-07:00 summary: bpo-40679: Use the function's qualname in certain TypeErrors (GH-20236) Patch by Dennis Sweeney. files: A Misc/NEWS.d/next/Core and Builtins/2020-05-19-19-39-49.bpo-40679.SVzz9p.rst M Lib/test/test_call.py M Lib/test/test_keywordonlyarg.py M Python/ceval.c diff --git a/Lib/test/test_call.py b/Lib/test/test_call.py index 451a7170c304d..3f459222748b3 100644 --- a/Lib/test/test_call.py +++ b/Lib/test/test_call.py @@ -8,6 +8,7 @@ import collections import itertools import gc +import contextlib class FunctionCalls(unittest.TestCase): @@ -665,5 +666,52 @@ def __call__(self, *args): self.assertEqual(expected, wrapped(*args, **kwargs)) +class A: + def method_two_args(self, x, y): + pass + + @staticmethod + def static_no_args(): + pass + + @staticmethod + def positional_only(arg, /): + pass + + at cpython_only +class TestErrorMessagesUseQualifiedName(unittest.TestCase): + + @contextlib.contextmanager + def check_raises_type_error(self, message): + with self.assertRaises(TypeError) as cm: + yield + self.assertEqual(str(cm.exception), message) + + def test_missing_arguments(self): + msg = "A.method_two_args() missing 1 required positional argument: 'y'" + with self.check_raises_type_error(msg): + A().method_two_args("x") + + def 
test_too_many_positional(self): + msg = "A.static_no_args() takes 0 positional arguments but 1 was given" + with self.check_raises_type_error(msg): + A.static_no_args("oops it's an arg") + + def test_positional_only_passed_as_keyword(self): + msg = "A.positional_only() got some positional-only arguments passed as keyword arguments: 'arg'" + with self.check_raises_type_error(msg): + A.positional_only(arg="x") + + def test_unexpected_keyword(self): + msg = "A.method_two_args() got an unexpected keyword argument 'bad'" + with self.check_raises_type_error(msg): + A().method_two_args(bad="x") + + def test_multiple_values(self): + msg = "A.method_two_args() got multiple values for argument 'x'" + with self.check_raises_type_error(msg): + A().method_two_args("x", "y", x="oops") + + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_keywordonlyarg.py b/Lib/test/test_keywordonlyarg.py index 2cf8a89a078e8..df82f677a00a4 100644 --- a/Lib/test/test_keywordonlyarg.py +++ b/Lib/test/test_keywordonlyarg.py @@ -63,7 +63,8 @@ def f(a, b=None, *, c=None): pass with self.assertRaises(TypeError) as exc: f(1, 2, 3) - expected = "f() takes from 1 to 2 positional arguments but 3 were given" + expected = (f"{f.__qualname__}() takes from 1 to 2 " + "positional arguments but 3 were given") self.assertEqual(str(exc.exception), expected) def testSyntaxErrorForFunctionCall(self): diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-19-19-39-49.bpo-40679.SVzz9p.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-19-19-39-49.bpo-40679.SVzz9p.rst new file mode 100644 index 0000000000000..2d0a432b6fa69 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-19-19-39-49.bpo-40679.SVzz9p.rst @@ -0,0 +1,2 @@ +Certain :exc:`TypeError` messages about missing or extra arguments now include the function's +:term:`qualified name`. Patch by Dennis Sweeney. 
diff --git a/Python/ceval.c b/Python/ceval.c index 43ea1c760b17e..a79773f85118a 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -3875,7 +3875,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, PyFrameObject *f, int throwflag) static void format_missing(PyThreadState *tstate, const char *kind, - PyCodeObject *co, PyObject *names) + PyCodeObject *co, PyObject *names, PyObject *qualname) { int err; Py_ssize_t len = PyList_GET_SIZE(names); @@ -3928,7 +3928,7 @@ format_missing(PyThreadState *tstate, const char *kind, return; _PyErr_Format(tstate, PyExc_TypeError, "%U() missing %i required %s argument%s: %U", - co->co_name, + qualname, len, kind, len == 1 ? "" : "s", @@ -3939,7 +3939,7 @@ format_missing(PyThreadState *tstate, const char *kind, static void missing_arguments(PyThreadState *tstate, PyCodeObject *co, Py_ssize_t missing, Py_ssize_t defcount, - PyObject **fastlocals) + PyObject **fastlocals, PyObject *qualname) { Py_ssize_t i, j = 0; Py_ssize_t start, end; @@ -3971,14 +3971,14 @@ missing_arguments(PyThreadState *tstate, PyCodeObject *co, } } assert(j == missing); - format_missing(tstate, kind, co, missing_names); + format_missing(tstate, kind, co, missing_names, qualname); Py_DECREF(missing_names); } static void too_many_positional(PyThreadState *tstate, PyCodeObject *co, Py_ssize_t given, Py_ssize_t defcount, - PyObject **fastlocals) + PyObject **fastlocals, PyObject *qualname) { int plural; Py_ssize_t kwonly_given = 0; @@ -4022,7 +4022,7 @@ too_many_positional(PyThreadState *tstate, PyCodeObject *co, } _PyErr_Format(tstate, PyExc_TypeError, "%U() takes %U positional argument%s but %zd%U %s given", - co->co_name, + qualname, sig, plural ? 
"s" : "", given, @@ -4034,7 +4034,8 @@ too_many_positional(PyThreadState *tstate, PyCodeObject *co, static int positional_only_passed_as_keyword(PyThreadState *tstate, PyCodeObject *co, - Py_ssize_t kwcount, PyObject* const* kwnames) + Py_ssize_t kwcount, PyObject* const* kwnames, + PyObject *qualname) { int posonly_conflicts = 0; PyObject* posonly_names = PyList_New(0); @@ -4079,7 +4080,7 @@ positional_only_passed_as_keyword(PyThreadState *tstate, PyCodeObject *co, _PyErr_Format(tstate, PyExc_TypeError, "%U() got some positional-only arguments passed" " as keyword arguments: '%U'", - co->co_name, error_names); + qualname, error_names); Py_DECREF(error_names); goto fail; } @@ -4180,7 +4181,7 @@ _PyEval_EvalCode(PyThreadState *tstate, if (keyword == NULL || !PyUnicode_Check(keyword)) { _PyErr_Format(tstate, PyExc_TypeError, "%U() keywords must be strings", - co->co_name); + qualname); goto fail; } @@ -4211,14 +4212,14 @@ _PyEval_EvalCode(PyThreadState *tstate, if (co->co_posonlyargcount && positional_only_passed_as_keyword(tstate, co, - kwcount, kwnames)) + kwcount, kwnames, qualname)) { goto fail; } _PyErr_Format(tstate, PyExc_TypeError, "%U() got an unexpected keyword argument '%S'", - co->co_name, keyword); + qualname, keyword); goto fail; } @@ -4231,7 +4232,7 @@ _PyEval_EvalCode(PyThreadState *tstate, if (GETLOCAL(j) != NULL) { _PyErr_Format(tstate, PyExc_TypeError, "%U() got multiple values for argument '%S'", - co->co_name, keyword); + qualname, keyword); goto fail; } Py_INCREF(value); @@ -4240,7 +4241,7 @@ _PyEval_EvalCode(PyThreadState *tstate, /* Check the number of positional arguments */ if ((argcount > co->co_argcount) && !(co->co_flags & CO_VARARGS)) { - too_many_positional(tstate, co, argcount, defcount, fastlocals); + too_many_positional(tstate, co, argcount, defcount, fastlocals, qualname); goto fail; } @@ -4254,7 +4255,7 @@ _PyEval_EvalCode(PyThreadState *tstate, } } if (missing) { - missing_arguments(tstate, co, missing, defcount, fastlocals); + 
missing_arguments(tstate, co, missing, defcount, fastlocals, qualname); goto fail; } if (n > m) @@ -4292,7 +4293,7 @@ _PyEval_EvalCode(PyThreadState *tstate, missing++; } if (missing) { - missing_arguments(tstate, co, missing, -1, fastlocals); + missing_arguments(tstate, co, missing, -1, fastlocals, qualname); goto fail; } } From webhook-mailer at python.org Fri May 22 17:43:59 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 22 May 2020 21:43:59 -0000 Subject: [Python-checkins] Reword aware/naive introduction sentence (GH-20175) Message-ID: https://github.com/python/cpython/commit/0714c4871970b836d7fbfaa60c475a70cd1644a7 commit: 0714c4871970b836d7fbfaa60c475a70cd1644a7 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-22T14:43:51-07:00 summary: Reword aware/naive introduction sentence (GH-20175) This is more informative and avoids the question of whether the period should go inside or outside the quotation marks. See also GH-20007. (cherry picked from commit 2e76820a50b8ce2a9a5f6cdef6cef1859a89c460) Co-authored-by: Mathieu Dupuy files: M Doc/library/datetime.rst diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst index 577cc0e1c1f4b..b733c4406af71 100644 --- a/Doc/library/datetime.rst +++ b/Doc/library/datetime.rst @@ -35,7 +35,8 @@ on efficient attribute extraction for output formatting and manipulation. Aware and Naive Objects ----------------------- -Date and time objects may be categorized as "aware" or "naive". +Date and time objects may be categorized as "aware" or "naive" depending on +whether or not they include timezone information. 
With sufficient knowledge of applicable algorithmic and political time adjustments, such as time zone and daylight saving time information, From webhook-mailer at python.org Fri May 22 18:22:56 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 22 May 2020 22:22:56 -0000 Subject: [Python-checkins] bpo-40327: Improve atomicity, speed, and memory efficiency of the items() loop (GH-19628) Message-ID: https://github.com/python/cpython/commit/16d07812dd3833295cc001d19eea42eecbdb6ea5 commit: 16d07812dd3833295cc001d19eea42eecbdb6ea5 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-22T15:22:51-07:00 summary: bpo-40327: Improve atomicity, speed, and memory efficiency of the items() loop (GH-19628) (cherry picked from commit 75bedbe2ed4119ff18a2ea86c544b3cf08a92e75) Co-authored-by: Raymond Hettinger files: M Lib/pickle.py diff --git a/Lib/pickle.py b/Lib/pickle.py index 515cb8a0bb3f1..af50a9b0c06bb 100644 --- a/Lib/pickle.py +++ b/Lib/pickle.py @@ -339,7 +339,7 @@ def whichmodule(obj, name): return module_name # Protect the iteration by using a list copy of sys.modules against dynamic # modules that trigger imports of other modules upon calls to getattr. 
- for module_name, module in list(sys.modules.items()): + for module_name, module in sys.modules.copy().items(): if module_name == '__main__' or module is None: continue try: From webhook-mailer at python.org Fri May 22 18:24:50 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 22 May 2020 22:24:50 -0000 Subject: [Python-checkins] bpo-40214: Fix ctypes WinDLL test with insecure flags (GH-19652) Message-ID: https://github.com/python/cpython/commit/0cc7becde0bfe896fd23b5cb14fedfb8f2066fca commit: 0cc7becde0bfe896fd23b5cb14fedfb8f2066fca branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-22T15:24:45-07:00 summary: bpo-40214: Fix ctypes WinDLL test with insecure flags (GH-19652) (cherry picked from commit 9b498939009f49b8c772c89e8fc80efbfd8afcb5) Co-authored-by: Steve Dower files: M Lib/ctypes/test/test_loading.py diff --git a/Lib/ctypes/test/test_loading.py b/Lib/ctypes/test/test_loading.py index a62044e370af6..5c48b0db4c393 100644 --- a/Lib/ctypes/test/test_loading.py +++ b/Lib/ctypes/test/test_loading.py @@ -158,11 +158,9 @@ def should_fail(command): # Relative path (but not just filename) should succeed should_pass("WinDLL('./_sqlite3.dll')") - # XXX: This test has started failing on Azure Pipelines CI. See - # bpo-40214 for more information. 
- if 0: - # Insecure load flags should succeed - should_pass("WinDLL('_sqlite3.dll', winmode=0)") + # Insecure load flags should succeed + # Clear the DLL directory to avoid safe search settings propagating + should_pass("windll.kernel32.SetDllDirectoryW(None); WinDLL('_sqlite3.dll', winmode=0)") # Full path load without DLL_LOAD_DIR shouldn't find dependency should_fail("WinDLL(nt._getfullpathname('_sqlite3.dll'), " + From webhook-mailer at python.org Fri May 22 18:32:39 2020 From: webhook-mailer at python.org (Batuhan Taskaya) Date: Fri, 22 May 2020 22:32:39 -0000 Subject: [Python-checkins] [3.7] bpo-40663: Correctly handle annotations with subscripts in ast_unparse.c (GH-20156). (GH-20192) Message-ID: https://github.com/python/cpython/commit/43300148c5f30317ebf767aa8853a957ee5c87fb commit: 43300148c5f30317ebf767aa8853a957ee5c87fb branch: 3.7 author: Batuhan Taskaya committer: GitHub date: 2020-05-22T23:32:34+01:00 summary: [3.7] bpo-40663: Correctly handle annotations with subscripts in ast_unparse.c (GH-20156). 
(GH-20192) (cherry picked from commit 2135e10dc717c00d10d899d232bebfc59bb25032) Co-authored-by: Batuhan Taskaya files: A Misc/NEWS.d/next/Core and Builtins/2020-05-17-20-38-12.bpo-40663.u2aiZf.rst M Lib/test/test_future.py M Python/ast_unparse.c diff --git a/Lib/test/test_future.py b/Lib/test/test_future.py index 2aed01095aa7e..13a4f6f3cc6a3 100644 --- a/Lib/test/test_future.py +++ b/Lib/test/test_future.py @@ -237,6 +237,10 @@ def test_annotations(self): eq("dict[str, int]") eq("set[str,]") eq("tuple[str, ...]") + eq("tuple[(str, *types)]") + eq("tuple[xx:yy, (*types,)]") + eq("tuple[str, int, (str, int)]") + eq("tuple[(*int, str, str, (str, int))]") eq("tuple[str, int, float, dict[str, int]]") eq("slice[0]") eq("slice[0:1]") diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-17-20-38-12.bpo-40663.u2aiZf.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-17-20-38-12.bpo-40663.u2aiZf.rst new file mode 100644 index 0000000000000..5041abc7e3eaa --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-17-20-38-12.bpo-40663.u2aiZf.rst @@ -0,0 +1,2 @@ +Correctly generate annotations where parentheses are omitted but required +(e.g: ``Type[(str, int, *other))]``. 
diff --git a/Python/ast_unparse.c b/Python/ast_unparse.c index 93b3ecffb017e..7ed4b4f180823 100644 --- a/Python/ast_unparse.c +++ b/Python/ast_unparse.c @@ -743,6 +743,24 @@ append_ast_ext_slice(_PyUnicodeWriter *writer, slice_ty slice) return 0; } +static int +append_ast_index_slice(_PyUnicodeWriter *writer, slice_ty slice) +{ + int level = PR_TUPLE; + expr_ty value = slice->v.Index.value; + if (value->kind == Tuple_kind) { + for (Py_ssize_t i = 0; i < asdl_seq_LEN(value->v.Tuple.elts); i++) { + expr_ty element = asdl_seq_GET(value->v.Tuple.elts, i); + if (element->kind == Starred_kind) { + ++level; + break; + } + } + } + APPEND_EXPR(value, level); + return 0; +} + static int append_ast_slice(_PyUnicodeWriter *writer, slice_ty slice) { @@ -752,8 +770,7 @@ append_ast_slice(_PyUnicodeWriter *writer, slice_ty slice) case ExtSlice_kind: return append_ast_ext_slice(writer, slice); case Index_kind: - APPEND_EXPR(slice->v.Index.value, PR_TUPLE); - return 0; + return append_ast_index_slice(writer, slice); default: PyErr_SetString(PyExc_SystemError, "unexpected slice kind"); From webhook-mailer at python.org Fri May 22 18:32:45 2020 From: webhook-mailer at python.org (Batuhan Taskaya) Date: Fri, 22 May 2020 22:32:45 -0000 Subject: [Python-checkins] [3.8] bpo-40663: Correctly handle annotations with subscripts in ast_unparse.c (GH-20156). (GH-20191) Message-ID: https://github.com/python/cpython/commit/a4d219b35e35f9efc406cd70f2812275bcd989fe commit: a4d219b35e35f9efc406cd70f2812275bcd989fe branch: 3.8 author: Batuhan Taskaya committer: GitHub date: 2020-05-22T23:32:40+01:00 summary: [3.8] bpo-40663: Correctly handle annotations with subscripts in ast_unparse.c (GH-20156). 
(GH-20191) (cherry picked from commit 2135e10dc717c00d10d899d232bebfc59bb25032) Co-authored-by: Batuhan Taskaya files: A Misc/NEWS.d/next/Core and Builtins/2020-05-17-20-38-12.bpo-40663.u2aiZf.rst M Lib/test/test_future.py M Python/ast_unparse.c diff --git a/Lib/test/test_future.py b/Lib/test/test_future.py index ea13533b5143d..57946467d6816 100644 --- a/Lib/test/test_future.py +++ b/Lib/test/test_future.py @@ -265,6 +265,10 @@ def test_annotations(self): eq("dict[str, int]") eq("set[str,]") eq("tuple[str, ...]") + eq("tuple[(str, *types)]") + eq("tuple[xx:yy, (*types,)]") + eq("tuple[str, int, (str, int)]") + eq("tuple[(*int, str, str, (str, int))]") eq("tuple[str, int, float, dict[str, int]]") eq("slice[0]") eq("slice[0:1]") diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-17-20-38-12.bpo-40663.u2aiZf.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-17-20-38-12.bpo-40663.u2aiZf.rst new file mode 100644 index 0000000000000..5041abc7e3eaa --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-17-20-38-12.bpo-40663.u2aiZf.rst @@ -0,0 +1,2 @@ +Correctly generate annotations where parentheses are omitted but required +(e.g: ``Type[(str, int, *other))]``. 
diff --git a/Python/ast_unparse.c b/Python/ast_unparse.c index 1a7cd236aafa1..af9604eb45b18 100644 --- a/Python/ast_unparse.c +++ b/Python/ast_unparse.c @@ -750,6 +750,24 @@ append_ast_ext_slice(_PyUnicodeWriter *writer, slice_ty slice) return 0; } +static int +append_ast_index_slice(_PyUnicodeWriter *writer, slice_ty slice) +{ + int level = PR_TUPLE; + expr_ty value = slice->v.Index.value; + if (value->kind == Tuple_kind) { + for (Py_ssize_t i = 0; i < asdl_seq_LEN(value->v.Tuple.elts); i++) { + expr_ty element = asdl_seq_GET(value->v.Tuple.elts, i); + if (element->kind == Starred_kind) { + ++level; + break; + } + } + } + APPEND_EXPR(value, level); + return 0; +} + static int append_ast_slice(_PyUnicodeWriter *writer, slice_ty slice) { @@ -759,8 +777,7 @@ append_ast_slice(_PyUnicodeWriter *writer, slice_ty slice) case ExtSlice_kind: return append_ast_ext_slice(writer, slice); case Index_kind: - APPEND_EXPR(slice->v.Index.value, PR_TUPLE); - return 0; + return append_ast_index_slice(writer, slice); default: PyErr_SetString(PyExc_SystemError, "unexpected slice kind"); From webhook-mailer at python.org Fri May 22 20:29:42 2020 From: webhook-mailer at python.org (Antoine) Date: Sat, 23 May 2020 00:29:42 -0000 Subject: [Python-checkins] bpo-40552 Add 'users' variable in code sample (tutorial 4.2). (GH-19992) Message-ID: https://github.com/python/cpython/commit/6fad3e6b49f6a9f8b8a6635c41371e4451479f86 commit: 6fad3e6b49f6a9f8b8a6635c41371e4451479f86 branch: master author: Antoine <43954001+awecx at users.noreply.github.com> committer: GitHub date: 2020-05-22T21:29:34-03:00 summary: bpo-40552 Add 'users' variable in code sample (tutorial 4.2). (GH-19992) * Add 'users' variable in code sample. * ?? Added by blurb_it. 
Co-authored-by: blurb-it[bot] <43283697+blurb-it[bot]@users.noreply.github.com> files: A Misc/NEWS.d/next/Documentation/2020-05-09-12-10-31.bpo-40552._0uB73.rst M Doc/tutorial/controlflow.rst diff --git a/Doc/tutorial/controlflow.rst b/Doc/tutorial/controlflow.rst index f05f5edd5ccc4..26de866aab90c 100644 --- a/Doc/tutorial/controlflow.rst +++ b/Doc/tutorial/controlflow.rst @@ -70,6 +70,9 @@ Code that modifies a collection while iterating over that same collection can be tricky to get right. Instead, it is usually more straight-forward to loop over a copy of the collection or to create a new collection:: + # Create a sample collection + users = {'Hans': 'active', '?l?onore': 'inactive', '???': 'active'} + # Strategy: Iterate over a copy for user, status in users.copy().items(): if status == 'inactive': diff --git a/Misc/NEWS.d/next/Documentation/2020-05-09-12-10-31.bpo-40552._0uB73.rst b/Misc/NEWS.d/next/Documentation/2020-05-09-12-10-31.bpo-40552._0uB73.rst new file mode 100644 index 0000000000000..5ed9c31834ac2 --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2020-05-09-12-10-31.bpo-40552._0uB73.rst @@ -0,0 +1,2 @@ +Fix in tutorial section 4.2. +Code snippet is now correct. \ No newline at end of file From webhook-mailer at python.org Fri May 22 21:05:12 2020 From: webhook-mailer at python.org (Sardorbek Imomaliev) Date: Sat, 23 May 2020 01:05:12 -0000 Subject: [Python-checkins] [doc] Add missing space in urllib.request.rst (GH-19860) Message-ID: https://github.com/python/cpython/commit/8022eb4e971f549825f2581e83ee3c028f01329e commit: 8022eb4e971f549825f2581e83ee3c028f01329e branch: master author: Sardorbek Imomaliev committer: GitHub date: 2020-05-22T18:05:05-07:00 summary: [doc] Add missing space in urllib.request.rst (GH-19860) This is just a typo fix, should I still provide news file? 
files: M Doc/library/urllib.request.rst diff --git a/Doc/library/urllib.request.rst b/Doc/library/urllib.request.rst index 03712c1f4a6ee..288ce14d36f01 100644 --- a/Doc/library/urllib.request.rst +++ b/Doc/library/urllib.request.rst @@ -1235,7 +1235,7 @@ Here is an example of doing a ``PUT`` request using :class:`Request`:: import urllib.request DATA = b'some data' - req = urllib.request.Request(url='http://localhost:8080', data=DATA,method='PUT') + req = urllib.request.Request(url='http://localhost:8080', data=DATA, method='PUT') with urllib.request.urlopen(req) as f: pass print(f.status) From webhook-mailer at python.org Fri May 22 21:12:15 2020 From: webhook-mailer at python.org (Matteo Bertucci) Date: Sat, 23 May 2020 01:12:15 -0000 Subject: [Python-checkins] bpo-40439: Update broken link in lexical analysis docs (GH-20184) Message-ID: https://github.com/python/cpython/commit/af23f0d3cf19343512e6ca1fe1d46a5dbe425719 commit: af23f0d3cf19343512e6ca1fe1d46a5dbe425719 branch: master author: Matteo Bertucci committer: GitHub date: 2020-05-22T18:12:09-07:00 summary: bpo-40439: Update broken link in lexical analysis docs (GH-20184) Automerge-Triggered-By: @csabella files: M Doc/reference/lexical_analysis.rst diff --git a/Doc/reference/lexical_analysis.rst b/Doc/reference/lexical_analysis.rst index 3f420817eefea..e3a3a88757ed2 100644 --- a/Doc/reference/lexical_analysis.rst +++ b/Doc/reference/lexical_analysis.rst @@ -325,7 +325,7 @@ of identifiers is based on NFKC. A non-normative HTML file listing all valid identifier characters for Unicode 4.1 can be found at -https://www.dcl.hpi.uni-potsdam.de/home/loewis/table-3131.html. +https://www.unicode.org/Public/13.0.0/ucd/DerivedCoreProperties.txt .. 
_keywords: From webhook-mailer at python.org Fri May 22 21:17:52 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sat, 23 May 2020 01:17:52 -0000 Subject: [Python-checkins] bpo-40439: Update broken link in lexical analysis docs (GH-20184) Message-ID: https://github.com/python/cpython/commit/0af9bef61afffbf128aba76a2e578059621b4f00 commit: 0af9bef61afffbf128aba76a2e578059621b4f00 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-22T18:17:45-07:00 summary: bpo-40439: Update broken link in lexical analysis docs (GH-20184) Automerge-Triggered-By: @csabella (cherry picked from commit af23f0d3cf19343512e6ca1fe1d46a5dbe425719) Co-authored-by: Matteo Bertucci files: M Doc/reference/lexical_analysis.rst diff --git a/Doc/reference/lexical_analysis.rst b/Doc/reference/lexical_analysis.rst index f6f250e391500..b6eaacb46591a 100644 --- a/Doc/reference/lexical_analysis.rst +++ b/Doc/reference/lexical_analysis.rst @@ -325,7 +325,7 @@ of identifiers is based on NFKC. A non-normative HTML file listing all valid identifier characters for Unicode 4.1 can be found at -https://www.dcl.hpi.uni-potsdam.de/home/loewis/table-3131.html. +https://www.unicode.org/Public/13.0.0/ucd/DerivedCoreProperties.txt .. 
_keywords: From webhook-mailer at python.org Fri May 22 21:20:06 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sat, 23 May 2020 01:20:06 -0000 Subject: [Python-checkins] bpo-40439: Update broken link in lexical analysis docs (GH-20184) Message-ID: https://github.com/python/cpython/commit/dc3239177ff26cb6a12e437a1f507be730fe8ba7 commit: dc3239177ff26cb6a12e437a1f507be730fe8ba7 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-22T18:20:01-07:00 summary: bpo-40439: Update broken link in lexical analysis docs (GH-20184) Automerge-Triggered-By: @csabella (cherry picked from commit af23f0d3cf19343512e6ca1fe1d46a5dbe425719) Co-authored-by: Matteo Bertucci files: M Doc/reference/lexical_analysis.rst diff --git a/Doc/reference/lexical_analysis.rst b/Doc/reference/lexical_analysis.rst index 844bd7cc870e8..d0e7fde0f5ae3 100644 --- a/Doc/reference/lexical_analysis.rst +++ b/Doc/reference/lexical_analysis.rst @@ -325,7 +325,7 @@ of identifiers is based on NFKC. A non-normative HTML file listing all valid identifier characters for Unicode 4.1 can be found at -https://www.dcl.hpi.uni-potsdam.de/home/loewis/table-3131.html. +https://www.unicode.org/Public/13.0.0/ucd/DerivedCoreProperties.txt .. 
_keywords: From webhook-mailer at python.org Sat May 23 04:00:23 2020 From: webhook-mailer at python.org (Rafael Fontenelle) Date: Sat, 23 May 2020 08:00:23 -0000 Subject: [Python-checkins] Add pt-br switcher to the Documentation website (GH-20301) Message-ID: https://github.com/python/cpython/commit/763b193c96bf07a83a991985028e9ef272931b2d commit: 763b193c96bf07a83a991985028e9ef272931b2d branch: 3.6 author: Rafael Fontenelle committer: GitHub date: 2020-05-23T04:00:10-04:00 summary: Add pt-br switcher to the Documentation website (GH-20301) files: M Doc/tools/static/switchers.js diff --git a/Doc/tools/static/switchers.js b/Doc/tools/static/switchers.js index 9eb8ec8809aea..3b6de6f3d90fd 100644 --- a/Doc/tools/static/switchers.js +++ b/Doc/tools/static/switchers.js @@ -24,6 +24,7 @@ 'fr': 'French', 'ja': 'Japanese', 'ko': 'Korean', + 'pt-br': 'Brazilian Portuguese', }; function build_version_select(current_version, current_release) { From webhook-mailer at python.org Sat May 23 05:59:15 2020 From: webhook-mailer at python.org (Shantanu) Date: Sat, 23 May 2020 09:59:15 -0000 Subject: [Python-checkins] bpo-34556: Document addition of upgrade_deps to venv.create (GH-20135) Message-ID: https://github.com/python/cpython/commit/1cba1c9abadf76f458ecf883a48515aa3b534dbd commit: 1cba1c9abadf76f458ecf883a48515aa3b534dbd branch: master author: Shantanu committer: GitHub date: 2020-05-23T02:58:50-07:00 summary: bpo-34556: Document addition of upgrade_deps to venv.create (GH-20135) files: M Doc/library/venv.rst diff --git a/Doc/library/venv.rst b/Doc/library/venv.rst index 8abadc4df3cac..5d4a36481f1dc 100644 --- a/Doc/library/venv.rst +++ b/Doc/library/venv.rst @@ -249,7 +249,8 @@ creation according to their needs, the :class:`EnvBuilder` class. There is also a module-level convenience function: .. 
function:: create(env_dir, system_site_packages=False, clear=False, \ - symlinks=False, with_pip=False, prompt=None) + symlinks=False, with_pip=False, prompt=None, \ + upgrade_deps=False) Create an :class:`EnvBuilder` with the given keyword arguments, and call its :meth:`~EnvBuilder.create` method with the *env_dir* argument. @@ -262,6 +263,9 @@ There is also a module-level convenience function: .. versionchanged:: 3.6 Added the ``prompt`` parameter + .. versionchanged:: 3.9 + Added the ``upgrade_deps`` parameter + An example of extending ``EnvBuilder`` -------------------------------------- From webhook-mailer at python.org Sat May 23 19:14:38 2020 From: webhook-mailer at python.org (Bar Harel) Date: Sat, 23 May 2020 23:14:38 -0000 Subject: [Python-checkins] bpo-40405: Fix asyncio.as_completed docs (GH-19753) Message-ID: https://github.com/python/cpython/commit/13206b52d16c2489f4c7dd2dce2a7f48a554b5ed commit: 13206b52d16c2489f4c7dd2dce2a7f48a554b5ed branch: master author: Bar Harel committer: GitHub date: 2020-05-23T19:14:31-04:00 summary: bpo-40405: Fix asyncio.as_completed docs (GH-19753) * Fix as_completed docs to correctly state the function return value. * Also, improves the general wording of the as_completed documentation. Co-Authored-By: R?mi Lapeyre Co-Authored-By: Kyle Stanley Co-Authored-By: Yury Selivanov files: M Doc/library/asyncio-task.rst diff --git a/Doc/library/asyncio-task.rst b/Doc/library/asyncio-task.rst index dd94c14854835..847363b134a7a 100644 --- a/Doc/library/asyncio-task.rst +++ b/Doc/library/asyncio-task.rst @@ -585,9 +585,9 @@ Waiting Primitives .. function:: as_completed(aws, \*, loop=None, timeout=None) Run :ref:`awaitable objects ` in the *aws* - set concurrently. Return an iterator of :class:`Future` objects. - Each Future object returned represents the earliest result - from the set of the remaining awaitables. + set concurrently. Return an iterator of coroutines. 
+ Each coroutine returned can be awaited to get the earliest next + result from the set of the remaining awaitables. Raises :exc:`asyncio.TimeoutError` if the timeout occurs before all Futures are done. @@ -597,8 +597,8 @@ Waiting Primitives Example:: - for f in as_completed(aws): - earliest_result = await f + for coro in as_completed(aws): + earliest_result = await coro # ... From webhook-mailer at python.org Sat May 23 19:24:07 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sat, 23 May 2020 23:24:07 -0000 Subject: [Python-checkins] bpo-40405: Fix asyncio.as_completed docs (GH-19753) Message-ID: https://github.com/python/cpython/commit/2fecb48a1d84190c37214eb4b0c8d5460300a78b commit: 2fecb48a1d84190c37214eb4b0c8d5460300a78b branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-23T16:24:03-07:00 summary: bpo-40405: Fix asyncio.as_completed docs (GH-19753) * Fix as_completed docs to correctly state the function return value. * Also, improves the general wording of the as_completed documentation. Co-Authored-By: R?mi Lapeyre Co-Authored-By: Kyle Stanley Co-Authored-By: Yury Selivanov (cherry picked from commit 13206b52d16c2489f4c7dd2dce2a7f48a554b5ed) Co-authored-by: Bar Harel files: M Doc/library/asyncio-task.rst diff --git a/Doc/library/asyncio-task.rst b/Doc/library/asyncio-task.rst index 1a23661fc772e..00ce5d4b72bdd 100644 --- a/Doc/library/asyncio-task.rst +++ b/Doc/library/asyncio-task.rst @@ -580,9 +580,9 @@ Waiting Primitives .. function:: as_completed(aws, \*, loop=None, timeout=None) Run :ref:`awaitable objects ` in the *aws* - set concurrently. Return an iterator of :class:`Future` objects. - Each Future object returned represents the earliest result - from the set of the remaining awaitables. + set concurrently. Return an iterator of coroutines. + Each coroutine returned can be awaited to get the earliest next + result from the set of the remaining awaitables. 
Raises :exc:`asyncio.TimeoutError` if the timeout occurs before all Futures are done. @@ -592,8 +592,8 @@ Waiting Primitives Example:: - for f in as_completed(aws): - earliest_result = await f + for coro in as_completed(aws): + earliest_result = await coro # ... From webhook-mailer at python.org Sun May 24 01:01:42 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Sun, 24 May 2020 05:01:42 -0000 Subject: [Python-checkins] Use Py_ssize_t for the column number in the PEG support code (GH-20341) Message-ID: https://github.com/python/cpython/commit/b23d7adfdfa66dd8e6f98e968b1ba885692b67d6 commit: b23d7adfdfa66dd8e6f98e968b1ba885692b67d6 branch: master author: Pablo Galindo committer: GitHub date: 2020-05-24T06:01:34+01:00 summary: Use Py_ssize_t for the column number in the PEG support code (GH-20341) files: M Parser/pegen/pegen.c diff --git a/Parser/pegen/pegen.c b/Parser/pegen/pegen.c index f1e3f9efb2f6b..9cade2a476de3 100644 --- a/Parser/pegen/pegen.c +++ b/Parser/pegen/pegen.c @@ -408,7 +408,7 @@ _PyPegen_raise_error_known_location(Parser *p, PyObject *errtype, } } - int col_number = byte_offset_to_character_offset(error_line, col_offset); + Py_ssize_t col_number = byte_offset_to_character_offset(error_line, col_offset); tmp = Py_BuildValue("(OiiN)", p->tok->filename, lineno, col_number, error_line); if (!tmp) { From webhook-mailer at python.org Sun May 24 01:03:54 2020 From: webhook-mailer at python.org (Hai Shi) Date: Sun, 24 May 2020 05:03:54 -0000 Subject: [Python-checkins] Remove spurious NULL in descrobject.c (GH-20344) Message-ID: https://github.com/python/cpython/commit/3f5f61409ebf95fa606bcbb15dfaaadad6084dc6 commit: 3f5f61409ebf95fa606bcbb15dfaaadad6084dc6 branch: master author: Hai Shi committer: GitHub date: 2020-05-24T06:03:50+01:00 summary: Remove spurious NULL in descrobject.c (GH-20344) Co-authored-by: hai shi files: M Objects/descrobject.c diff --git a/Objects/descrobject.c b/Objects/descrobject.c index c29cf7a4c4464..fce9cdd309077 
100644 --- a/Objects/descrobject.c +++ b/Objects/descrobject.c @@ -360,7 +360,6 @@ method_vectorcall_FASTCALL_KEYWORDS_METHOD( if (method_check_args(func, args, nargs, NULL)) { return NULL; } - NULL; PyCMethod meth = (PyCMethod) method_enter_call(tstate, func); if (meth == NULL) { return NULL; From webhook-mailer at python.org Sun May 24 06:03:57 2020 From: webhook-mailer at python.org (Zackery Spytz) Date: Sun, 24 May 2020 10:03:57 -0000 Subject: [Python-checkins] bpo-37973: Improve the docstrings of sys.float_info (GH-19218) Message-ID: https://github.com/python/cpython/commit/fdc5a94279736a7715dd35c5354a3c27098d0897 commit: fdc5a94279736a7715dd35c5354a3c27098d0897 branch: master author: Zackery Spytz committer: GitHub date: 2020-05-24T11:03:52+01:00 summary: bpo-37973: Improve the docstrings of sys.float_info (GH-19218) Taken from https://docs.python.org/3/library/sys.html#sys.float_info files: M Objects/floatobject.c diff --git a/Objects/floatobject.c b/Objects/floatobject.c index 9f5014092cf20..cc0ae8ce81908 100644 --- a/Objects/floatobject.c +++ b/Objects/floatobject.c @@ -59,12 +59,14 @@ static PyStructSequence_Field floatinfo_fields[] = { "is a normalized float"}, {"min_10_exp", "DBL_MIN_10_EXP -- minimum int e such that 10**e is " "a normalized"}, - {"dig", "DBL_DIG -- digits"}, + {"dig", "DBL_DIG -- maximum number of decimal digits that " + "can be faithfully represented in a float"}, {"mant_dig", "DBL_MANT_DIG -- mantissa digits"}, {"epsilon", "DBL_EPSILON -- Difference between 1 and the next " "representable float"}, {"radix", "FLT_RADIX -- radix of exponent"}, - {"rounds", "FLT_ROUNDS -- rounding mode"}, + {"rounds", "FLT_ROUNDS -- rounding mode used for arithmetic " + "operations"}, {0} }; From webhook-mailer at python.org Sun May 24 06:54:00 2020 From: webhook-mailer at python.org (Florian Dahlitz) Date: Sun, 24 May 2020 10:54:00 -0000 Subject: [Python-checkins] bpo-40723: Make IDLE autocomplete test run without __main__.__file__ (GH-20311) 
Message-ID: https://github.com/python/cpython/commit/905b3cd05f8d2c29e1605d109900e3e9d07af4d3 commit: 905b3cd05f8d2c29e1605d109900e3e9d07af4d3 branch: master author: Florian Dahlitz committer: GitHub date: 2020-05-24T06:53:44-04:00 summary: bpo-40723: Make IDLE autocomplete test run without __main__.__file__ (GH-20311) This was the only failure running unittest.main(test.test_idle) after imports. files: A Misc/NEWS.d/next/IDLE/2020-05-24-06-19-43.bpo-40723.AJLd4U.rst M Lib/idlelib/NEWS.txt M Lib/idlelib/idle_test/test_autocomplete.py diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt index 46b15234a19c6..b112e8ea293a9 100644 --- a/Lib/idlelib/NEWS.txt +++ b/Lib/idlelib/NEWS.txt @@ -3,6 +3,8 @@ Released on 2020-10-05? ====================================== +bpo-40723: Make test_idle pass when run after import. + bpo-38689: IDLE will no longer freeze when inspect.signature fails when fetching a calltip. diff --git a/Lib/idlelib/idle_test/test_autocomplete.py b/Lib/idlelib/idle_test/test_autocomplete.py index 2c478cd5c2a14..1841495fcf1a0 100644 --- a/Lib/idlelib/idle_test/test_autocomplete.py +++ b/Lib/idlelib/idle_test/test_autocomplete.py @@ -227,7 +227,7 @@ def test_fetch_completions(self): acp = self.autocomplete small, large = acp.fetch_completions( '', ac.ATTRS) - if __main__.__file__ != ac.__file__: + if hasattr(__main__, '__file__') and __main__.__file__ != ac.__file__: self.assertNotIn('AutoComplete', small) # See issue 36405. # Test attributes diff --git a/Misc/NEWS.d/next/IDLE/2020-05-24-06-19-43.bpo-40723.AJLd4U.rst b/Misc/NEWS.d/next/IDLE/2020-05-24-06-19-43.bpo-40723.AJLd4U.rst new file mode 100644 index 0000000000000..e0de2f9d83668 --- /dev/null +++ b/Misc/NEWS.d/next/IDLE/2020-05-24-06-19-43.bpo-40723.AJLd4U.rst @@ -0,0 +1 @@ +Make test_idle pass when run after import. 
From webhook-mailer at python.org Sun May 24 07:08:08 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sun, 24 May 2020 11:08:08 -0000 Subject: [Python-checkins] bpo-40723: Make IDLE autocomplete test run without __main__.__file__ (GH-20311) Message-ID: https://github.com/python/cpython/commit/a64df485a4c3ebb0caa1d62c02246cd43d0e976e commit: a64df485a4c3ebb0caa1d62c02246cd43d0e976e branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-24T04:08:04-07:00 summary: bpo-40723: Make IDLE autocomplete test run without __main__.__file__ (GH-20311) This was the only failure running unittest.main(test.test_idle) after imports. (cherry picked from commit 905b3cd05f8d2c29e1605d109900e3e9d07af4d3) Co-authored-by: Florian Dahlitz files: A Misc/NEWS.d/next/IDLE/2020-05-24-06-19-43.bpo-40723.AJLd4U.rst M Lib/idlelib/NEWS.txt M Lib/idlelib/idle_test/test_autocomplete.py diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt index de7543e370198..debd2c79bf2ae 100644 --- a/Lib/idlelib/NEWS.txt +++ b/Lib/idlelib/NEWS.txt @@ -3,6 +3,8 @@ Released on 2019-12-16? ====================================== +bpo-40723: Make test_idle pass when run after import. + bpo-38689: IDLE will no longer freeze when inspect.signature fails when fetching a calltip. diff --git a/Lib/idlelib/idle_test/test_autocomplete.py b/Lib/idlelib/idle_test/test_autocomplete.py index 2c478cd5c2a14..1841495fcf1a0 100644 --- a/Lib/idlelib/idle_test/test_autocomplete.py +++ b/Lib/idlelib/idle_test/test_autocomplete.py @@ -227,7 +227,7 @@ def test_fetch_completions(self): acp = self.autocomplete small, large = acp.fetch_completions( '', ac.ATTRS) - if __main__.__file__ != ac.__file__: + if hasattr(__main__, '__file__') and __main__.__file__ != ac.__file__: self.assertNotIn('AutoComplete', small) # See issue 36405. 
# Test attributes diff --git a/Misc/NEWS.d/next/IDLE/2020-05-24-06-19-43.bpo-40723.AJLd4U.rst b/Misc/NEWS.d/next/IDLE/2020-05-24-06-19-43.bpo-40723.AJLd4U.rst new file mode 100644 index 0000000000000..e0de2f9d83668 --- /dev/null +++ b/Misc/NEWS.d/next/IDLE/2020-05-24-06-19-43.bpo-40723.AJLd4U.rst @@ -0,0 +1 @@ +Make test_idle pass when run after import. From webhook-mailer at python.org Sun May 24 07:12:16 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sun, 24 May 2020 11:12:16 -0000 Subject: [Python-checkins] bpo-40723: Make IDLE autocomplete test run without __main__.__file__ (GH-20311) Message-ID: https://github.com/python/cpython/commit/82397e2d97f89fdf36cb8eaf3b2d7c407456ec78 commit: 82397e2d97f89fdf36cb8eaf3b2d7c407456ec78 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-24T04:12:11-07:00 summary: bpo-40723: Make IDLE autocomplete test run without __main__.__file__ (GH-20311) This was the only failure running unittest.main(test.test_idle) after imports. (cherry picked from commit 905b3cd05f8d2c29e1605d109900e3e9d07af4d3) Co-authored-by: Florian Dahlitz files: A Misc/NEWS.d/next/IDLE/2020-05-24-06-19-43.bpo-40723.AJLd4U.rst M Lib/idlelib/NEWS.txt M Lib/idlelib/idle_test/test_autocomplete.py diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt index 93084bfc2e554..7d77f6cf4529e 100644 --- a/Lib/idlelib/NEWS.txt +++ b/Lib/idlelib/NEWS.txt @@ -3,6 +3,8 @@ Released on 2019-12-16? ====================================== +bpo-40723: Make test_idle pass when run after import. + bpo-38689: IDLE will no longer freeze when inspect.signature fails when fetching a calltip. 
diff --git a/Lib/idlelib/idle_test/test_autocomplete.py b/Lib/idlelib/idle_test/test_autocomplete.py index 2c478cd5c2a14..1841495fcf1a0 100644 --- a/Lib/idlelib/idle_test/test_autocomplete.py +++ b/Lib/idlelib/idle_test/test_autocomplete.py @@ -227,7 +227,7 @@ def test_fetch_completions(self): acp = self.autocomplete small, large = acp.fetch_completions( '', ac.ATTRS) - if __main__.__file__ != ac.__file__: + if hasattr(__main__, '__file__') and __main__.__file__ != ac.__file__: self.assertNotIn('AutoComplete', small) # See issue 36405. # Test attributes diff --git a/Misc/NEWS.d/next/IDLE/2020-05-24-06-19-43.bpo-40723.AJLd4U.rst b/Misc/NEWS.d/next/IDLE/2020-05-24-06-19-43.bpo-40723.AJLd4U.rst new file mode 100644 index 0000000000000..e0de2f9d83668 --- /dev/null +++ b/Misc/NEWS.d/next/IDLE/2020-05-24-06-19-43.bpo-40723.AJLd4U.rst @@ -0,0 +1 @@ +Make test_idle pass when run after import. From webhook-mailer at python.org Sun May 24 09:40:27 2020 From: webhook-mailer at python.org (Terry Jan Reedy) Date: Sun, 24 May 2020 13:40:27 -0000 Subject: [Python-checkins] bpo-37309: Update NEWS.txt for 3.7.8 (GH-20353) Message-ID: https://github.com/python/cpython/commit/3269a0e56b02da61d8f6e7a5e2cca9f83d50694b commit: 3269a0e56b02da61d8f6e7a5e2cca9f83d50694b branch: 3.7 author: Terry Jan Reedy committer: GitHub date: 2020-05-24T09:40:19-04:00 summary: bpo-37309: Update NEWS.txt for 3.7.8 (GH-20353) files: M Lib/idlelib/NEWS.txt diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt index 7d77f6cf4529e..8226af4a4de6c 100644 --- a/Lib/idlelib/NEWS.txt +++ b/Lib/idlelib/NEWS.txt @@ -1,13 +1,19 @@ -What's New in IDLE 3.7.6 -Released on 2019-12-16? +What's New in IDLE 3.7.8 +Released on 2020-06-27? ====================================== bpo-40723: Make test_idle pass when run after import. +Patch by Florian Dahlitz. bpo-38689: IDLE will no longer freeze when inspect.signature fails when fetching a calltip. 
+ +What's New in IDLE 3.7.7 +Released on 2020-03-10 +====================================== + bpo-27115: For 'Go to Line', use a Query entry box subclass with IDLE standard behavior and improved error checking. @@ -37,6 +43,11 @@ bpo-32989: Add tests for editor newline_and_indent_event method. Remove unneeded arguments and dead code from pyparse find_good_parse_start method. + +What's New in IDLE 3.7.6 +Released on 2019-12-18 +====================================== + bpo-38943: Fix autocomplete windows not always appearing on some systems. Patch by Johnny Najera. From webhook-mailer at python.org Sun May 24 09:57:33 2020 From: webhook-mailer at python.org (Terry Jan Reedy) Date: Sun, 24 May 2020 13:57:33 -0000 Subject: [Python-checkins] [3.8] bpo-37309: Update IDLE NEWS.txt for 3.8.4 (GH-20355) Message-ID: https://github.com/python/cpython/commit/1ae0fd87a072426e35ff84dc6d1b2759d9ebee70 commit: 1ae0fd87a072426e35ff84dc6d1b2759d9ebee70 branch: 3.8 author: Terry Jan Reedy committer: GitHub date: 2020-05-24T09:57:29-04:00 summary: [3.8] bpo-37309: Update IDLE NEWS.txt for 3.8.4 (GH-20355) files: M Lib/idlelib/NEWS.txt diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt index debd2c79bf2ae..ff74abf5edde3 100644 --- a/Lib/idlelib/NEWS.txt +++ b/Lib/idlelib/NEWS.txt @@ -1,9 +1,15 @@ -What's New in IDLE 3.8.1 -Released on 2019-12-16? +What's New in IDLE 3.8.4 +Released on 2020-07-03? ====================================== bpo-40723: Make test_idle pass when run after import. +Patch by Florian Dahlitz. + + +What's New in IDLE 3.8.3 +Released on 2020-05-13 +====================================== bpo-38689: IDLE will no longer freeze when inspect.signature fails when fetching a calltip. @@ -21,6 +27,11 @@ bpo-39781: Selecting code context lines no longer causes a jump. bpo-39663: Add tests for pyparse find_good_parse_start(). 
+ +What's New in IDLE 3.8.2 +Released on 2020-02-17 +====================================== + bpo-39600: Remove duplicate font names from configuration list. bpo-38792: Close a shell calltip if a :exc:`KeyboardInterrupt` @@ -37,6 +48,11 @@ bpo-32989: Add tests for editor newline_and_indent_event method. Remove unneeded arguments and dead code from pyparse find_good_parse_start method. + +What's New in IDLE 3.8.1 +Released on 2019-12-18 +====================================== + bpo-38943: Fix autocomplete windows not always appearing on some systems. Patch by Johnny Najera. From webhook-mailer at python.org Sun May 24 09:57:59 2020 From: webhook-mailer at python.org (Terry Jan Reedy) Date: Sun, 24 May 2020 13:57:59 -0000 Subject: [Python-checkins] bpo-37309: Update IDLE NEWS.txt (GH-20356) Message-ID: https://github.com/python/cpython/commit/16ef3241939a3a64a447e5d7aabaf2e29deca621 commit: 16ef3241939a3a64a447e5d7aabaf2e29deca621 branch: master author: Terry Jan Reedy committer: GitHub date: 2020-05-24T09:57:55-04:00 summary: bpo-37309: Update IDLE NEWS.txt (GH-20356) files: M Lib/idlelib/NEWS.txt diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt index b112e8ea293a9..709008f78a571 100644 --- a/Lib/idlelib/NEWS.txt +++ b/Lib/idlelib/NEWS.txt @@ -4,6 +4,7 @@ Released on 2020-10-05? bpo-40723: Make test_idle pass when run after import. +Patch by Florian Dahlitz. bpo-38689: IDLE will no longer freeze when inspect.signature fails when fetching a calltip. 
From webhook-mailer at python.org Sun May 24 10:37:16 2020 From: webhook-mailer at python.org (Dong-hee Na) Date: Sun, 24 May 2020 14:37:16 -0000 Subject: [Python-checkins] bpo-40443: Remove unused imports in the zoneinfo (GH-20354) Message-ID: https://github.com/python/cpython/commit/3436f5f899f272d7164add072beb18eebd46d777 commit: 3436f5f899f272d7164add072beb18eebd46d777 branch: master author: Dong-hee Na committer: GitHub date: 2020-05-24T23:37:08+09:00 summary: bpo-40443: Remove unused imports in the zoneinfo (GH-20354) files: M Lib/zoneinfo/_tzpath.py M Lib/zoneinfo/_zoneinfo.py diff --git a/Lib/zoneinfo/_tzpath.py b/Lib/zoneinfo/_tzpath.py index c4c671d30dbdd..9e381b6e4434b 100644 --- a/Lib/zoneinfo/_tzpath.py +++ b/Lib/zoneinfo/_tzpath.py @@ -1,5 +1,4 @@ import os -import sys import sysconfig diff --git a/Lib/zoneinfo/_zoneinfo.py b/Lib/zoneinfo/_zoneinfo.py index 69133ae80a493..b207dd346fe25 100644 --- a/Lib/zoneinfo/_zoneinfo.py +++ b/Lib/zoneinfo/_zoneinfo.py @@ -2,10 +2,7 @@ import calendar import collections import functools -import os import re -import struct -import sys import weakref from datetime import datetime, timedelta, timezone, tzinfo From webhook-mailer at python.org Sun May 24 14:57:47 2020 From: webhook-mailer at python.org (Albert) Date: Sun, 24 May 2020 18:57:47 -0000 Subject: [Python-checkins] bpo-17050: Remove documentation on argparse.REMAINDER (GH-18661) Message-ID: https://github.com/python/cpython/commit/59f5022b5d3e5fcc60ac61cc256b627decf8ee68 commit: 59f5022b5d3e5fcc60ac61cc256b627decf8ee68 branch: master author: Albert committer: GitHub date: 2020-05-24T11:57:40-07:00 summary: bpo-17050: Remove documentation on argparse.REMAINDER (GH-18661) Closes [Issue 17050]() by removing argparse.REMAINDER from the documentation, as discussed on the issue. 
Automerge-Triggered-By: @rhettinger files: M Doc/library/argparse.rst diff --git a/Doc/library/argparse.rst b/Doc/library/argparse.rst index f8e3918968620..5e0096cae73a7 100644 --- a/Doc/library/argparse.rst +++ b/Doc/library/argparse.rst @@ -961,19 +961,6 @@ values are: usage: PROG [-h] foo [foo ...] PROG: error: the following arguments are required: foo -.. _`argparse.REMAINDER`: - -* ``argparse.REMAINDER``. All the remaining command-line arguments are gathered - into a list. This is commonly useful for command line utilities that dispatch - to other command line utilities:: - - >>> parser = argparse.ArgumentParser(prog='PROG') - >>> parser.add_argument('--foo') - >>> parser.add_argument('command') - >>> parser.add_argument('args', nargs=argparse.REMAINDER) - >>> print(parser.parse_args('--foo B cmd --arg1 XX ZZ'.split())) - Namespace(args=['--arg1', 'XX', 'ZZ'], command='cmd', foo='B') - If the ``nargs`` keyword argument is not provided, the number of arguments consumed is determined by the action_. Generally this means a single command-line argument will be consumed and a single item (not a list) will be produced. 
From webhook-mailer at python.org Sun May 24 17:31:52 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sun, 24 May 2020 21:31:52 -0000 Subject: [Python-checkins] bpo-36290: Fix keytword collision handling in AST node constructors (GH-12382) Message-ID: https://github.com/python/cpython/commit/907ee1f14aaf587683ced44818c5a1d1cabf4174 commit: 907ee1f14aaf587683ced44818c5a1d1cabf4174 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-24T14:31:47-07:00 summary: bpo-36290: Fix keytword collision handling in AST node constructors (GH-12382) (cherry picked from commit c73914a562580ae72048876cb42ed8e76e2c83f9) Co-authored-by: R?mi Lapeyre files: A Misc/NEWS.d/next/Library/2019-03-17-19-01-53.bpo-36290.7VXo_K.rst M Lib/ast.py M Lib/test/test_ast.py M Parser/asdl_c.py M Python/Python-ast.c diff --git a/Lib/ast.py b/Lib/ast.py index 0c88bcf4c821e..99a1148a3ba24 100644 --- a/Lib/ast.py +++ b/Lib/ast.py @@ -483,6 +483,13 @@ def __instancecheck__(cls, inst): return type.__instancecheck__(cls, inst) def _new(cls, *args, **kwargs): + for key in kwargs: + if key not in cls._fields: + # arbitrary keyword arguments are accepted + continue + pos = cls._fields.index(key) + if pos < len(args): + raise TypeError(f"{cls.__name__} got multiple values for argument {key!r}") if cls in _const_types: return Constant(*args, **kwargs) return Constant.__new__(cls, *args, **kwargs) diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py index 8887558ce4c31..486f2aa707e83 100644 --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast.py @@ -387,6 +387,15 @@ def test_classattrs(self): self.assertRaises(TypeError, ast.Num, 1, None, 2) self.assertRaises(TypeError, ast.Num, 1, None, 2, lineno=0) + # Arbitrary keyword arguments are supported + self.assertEqual(ast.Constant(1, foo='bar').foo, 'bar') + self.assertEqual(ast.Num(1, foo='bar').foo, 'bar') + + with self.assertRaisesRegex(TypeError, "Num got multiple 
values for argument 'n'"): + ast.Num(1, n=2) + with self.assertRaisesRegex(TypeError, "Constant got multiple values for argument 'value'"): + ast.Constant(1, value=2) + self.assertEqual(ast.Num(42).n, 42) self.assertEqual(ast.Num(4.25).n, 4.25) self.assertEqual(ast.Num(4.25j).n, 4.25j) diff --git a/Misc/NEWS.d/next/Library/2019-03-17-19-01-53.bpo-36290.7VXo_K.rst b/Misc/NEWS.d/next/Library/2019-03-17-19-01-53.bpo-36290.7VXo_K.rst new file mode 100644 index 0000000000000..a9afe62b0c46e --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-03-17-19-01-53.bpo-36290.7VXo_K.rst @@ -0,0 +1,2 @@ +AST nodes are now raising :exc:`TypeError` on conflicting keyword arguments. +Patch contributed by R?mi Lapeyre. diff --git a/Parser/asdl_c.py b/Parser/asdl_c.py index 574fcb0e2faaf..a708b66dbe917 100644 --- a/Parser/asdl_c.py +++ b/Parser/asdl_c.py @@ -665,8 +665,9 @@ def visitModule(self, mod): } if (fields) { numfields = PySequence_Size(fields); - if (numfields == -1) + if (numfields == -1) { goto cleanup; + } } res = 0; /* if no error occurs, this stays 0 to the end */ @@ -687,15 +688,35 @@ def visitModule(self, mod): } res = PyObject_SetAttr(self, name, PyTuple_GET_ITEM(args, i)); Py_DECREF(name); - if (res < 0) + if (res < 0) { goto cleanup; + } } if (kw) { i = 0; /* needed by PyDict_Next */ while (PyDict_Next(kw, &i, &key, &value)) { + int contains = PySequence_Contains(fields, key); + if (contains == -1) { + res = -1; + goto cleanup; + } else if (contains == 1) { + Py_ssize_t p = PySequence_Index(fields, key); + if (p == -1) { + res = -1; + goto cleanup; + } + if (p < PyTuple_GET_SIZE(args)) { + PyErr_Format(PyExc_TypeError, + "%.400s got multiple values for argument '%U'", + Py_TYPE(self)->tp_name, key); + res = -1; + goto cleanup; + } + } res = PyObject_SetAttr(self, key, value); - if (res < 0) + if (res < 0) { goto cleanup; + } } } cleanup: diff --git a/Python/Python-ast.c b/Python/Python-ast.c index f73f035845f8c..bcf9456942c6f 100644 --- a/Python/Python-ast.c +++ 
b/Python/Python-ast.c @@ -571,8 +571,9 @@ ast_type_init(PyObject *self, PyObject *args, PyObject *kw) } if (fields) { numfields = PySequence_Size(fields); - if (numfields == -1) + if (numfields == -1) { goto cleanup; + } } res = 0; /* if no error occurs, this stays 0 to the end */ @@ -593,15 +594,35 @@ ast_type_init(PyObject *self, PyObject *args, PyObject *kw) } res = PyObject_SetAttr(self, name, PyTuple_GET_ITEM(args, i)); Py_DECREF(name); - if (res < 0) + if (res < 0) { goto cleanup; + } } if (kw) { i = 0; /* needed by PyDict_Next */ while (PyDict_Next(kw, &i, &key, &value)) { + int contains = PySequence_Contains(fields, key); + if (contains == -1) { + res = -1; + goto cleanup; + } else if (contains == 1) { + Py_ssize_t p = PySequence_Index(fields, key); + if (p == -1) { + res = -1; + goto cleanup; + } + if (p < PyTuple_GET_SIZE(args)) { + PyErr_Format(PyExc_TypeError, + "%.400s got multiple values for argument '%U'", + Py_TYPE(self)->tp_name, key); + res = -1; + goto cleanup; + } + } res = PyObject_SetAttr(self, key, value); - if (res < 0) + if (res < 0) { goto cleanup; + } } } cleanup: From webhook-mailer at python.org Sun May 24 18:20:23 2020 From: webhook-mailer at python.org (Batuhan Taskaya) Date: Sun, 24 May 2020 22:20:23 -0000 Subject: [Python-checkins] bpo-40334: Support suppressing of multiple optional variables in Pegen (GH-20367) Message-ID: https://github.com/python/cpython/commit/cba503151056b448b7a3730dc36ef6655550ade5 commit: cba503151056b448b7a3730dc36ef6655550ade5 branch: master author: Batuhan Taskaya committer: GitHub date: 2020-05-24T23:20:18+01:00 summary: bpo-40334: Support suppressing of multiple optional variables in Pegen (GH-20367) files: M Tools/peg_generator/pegen/c_generator.py diff --git a/Tools/peg_generator/pegen/c_generator.py b/Tools/peg_generator/pegen/c_generator.py index 362698b0df62e..1249d4f683e26 100644 --- a/Tools/peg_generator/pegen/c_generator.py +++ b/Tools/peg_generator/pegen/c_generator.py @@ -694,8 +694,8 @@ def 
visit_Alt( if v == "_cut_var": v += " = 0" # cut_var must be initialized self.print(f"{var_type}{v};") - if v == "_opt_var": - self.print("UNUSED(_opt_var); // Silence compiler warnings") + if v.startswith("_opt_var"): + self.print(f"UNUSED({v}); // Silence compiler warnings") with self.local_variable_context(): if is_loop: From webhook-mailer at python.org Mon May 25 03:03:56 2020 From: webhook-mailer at python.org (Jakub Stasiak) Date: Mon, 25 May 2020 07:03:56 -0000 Subject: [Python-checkins] bpo-38580: Document that select() accepts iterables, not just sequences (GH-16832) Message-ID: https://github.com/python/cpython/commit/372ee27d4958302dac7ad6a8711f6fd04771b2e6 commit: 372ee27d4958302dac7ad6a8711f6fd04771b2e6 branch: master author: Jakub Stasiak committer: GitHub date: 2020-05-25T10:03:48+03:00 summary: bpo-38580: Document that select() accepts iterables, not just sequences (GH-16832) files: M Doc/library/select.rst M Modules/clinic/selectmodule.c.h M Modules/selectmodule.c diff --git a/Doc/library/select.rst b/Doc/library/select.rst index bb2809580d040..a354187c266c7 100644 --- a/Doc/library/select.rst +++ b/Doc/library/select.rst @@ -117,7 +117,7 @@ The module defines the following: .. function:: select(rlist, wlist, xlist[, timeout]) This is a straightforward interface to the Unix :c:func:`select` system call. - The first three arguments are sequences of 'waitable objects': either + The first three arguments are iterables of 'waitable objects': either integers representing file descriptors or objects with a parameterless method named :meth:`~io.IOBase.fileno` returning such an integer: @@ -126,7 +126,7 @@ The module defines the following: * *xlist*: wait for an "exceptional condition" (see the manual page for what your system considers such a condition) - Empty sequences are allowed, but acceptance of three empty sequences is + Empty iterables are allowed, but acceptance of three empty iterables is platform-dependent. 
(It is known to work on Unix but not on Windows.) The optional *timeout* argument specifies a time-out as a floating point number in seconds. When the *timeout* argument is omitted the function blocks until @@ -141,7 +141,7 @@ The module defines the following: single: socket() (in module socket) single: popen() (in module os) - Among the acceptable object types in the sequences are Python :term:`file + Among the acceptable object types in the iterables are Python :term:`file objects ` (e.g. ``sys.stdin``, or objects returned by :func:`open` or :func:`os.popen`), socket objects returned by :func:`socket.socket`. You may also define a :dfn:`wrapper` class yourself, diff --git a/Modules/clinic/selectmodule.c.h b/Modules/clinic/selectmodule.c.h index 670af6a3d8de6..888054b29eba2 100644 --- a/Modules/clinic/selectmodule.c.h +++ b/Modules/clinic/selectmodule.c.h @@ -8,7 +8,7 @@ PyDoc_STRVAR(select_select__doc__, "\n" "Wait until one or more file descriptors are ready for some kind of I/O.\n" "\n" -"The first three arguments are sequences of file descriptors to be waited for:\n" +"The first three arguments are iterables of file descriptors to be waited for:\n" "rlist -- wait until ready for reading\n" "wlist -- wait until ready for writing\n" "xlist -- wait for an \"exceptional condition\"\n" @@ -1215,4 +1215,4 @@ select_kqueue_control(kqueue_queue_Object *self, PyObject *const *args, Py_ssize #ifndef SELECT_KQUEUE_CONTROL_METHODDEF #define SELECT_KQUEUE_CONTROL_METHODDEF #endif /* !defined(SELECT_KQUEUE_CONTROL_METHODDEF) */ -/*[clinic end generated code: output=26bb05e5fba2bfd1 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=029f23fbe000d7f7 input=a9049054013a1b77]*/ diff --git a/Modules/selectmodule.c b/Modules/selectmodule.c index bec236689bd89..04e0067eec218 100644 --- a/Modules/selectmodule.c +++ b/Modules/selectmodule.c @@ -239,7 +239,7 @@ select.select Wait until one or more file descriptors are ready for some kind of I/O. 
-The first three arguments are sequences of file descriptors to be waited for: +The first three arguments are iterables of file descriptors to be waited for: rlist -- wait until ready for reading wlist -- wait until ready for writing xlist -- wait for an "exceptional condition" @@ -264,7 +264,7 @@ descriptors can be used. static PyObject * select_select_impl(PyObject *module, PyObject *rlist, PyObject *wlist, PyObject *xlist, PyObject *timeout_obj) -/*[clinic end generated code: output=2b3cfa824f7ae4cf input=177e72184352df25]*/ +/*[clinic end generated code: output=2b3cfa824f7ae4cf input=e467f5d68033de00]*/ { #ifdef SELECT_USES_HEAP pylist *rfd2obj, *wfd2obj, *efd2obj; @@ -320,7 +320,7 @@ select_select_impl(PyObject *module, PyObject *rlist, PyObject *wlist, } #endif /* SELECT_USES_HEAP */ - /* Convert sequences to fd_sets, and get maximum fd number + /* Convert iterables to fd_sets, and get maximum fd number * propagates the Python exception set in seq2set() */ rfd2obj[0].sentinel = -1; From webhook-mailer at python.org Mon May 25 03:55:14 2020 From: webhook-mailer at python.org (Zackery Spytz) Date: Mon, 25 May 2020 07:55:14 -0000 Subject: [Python-checkins] bpo-35714: Reject null characters in struct format strings (GH-16928) Message-ID: https://github.com/python/cpython/commit/3f59b55316f4c6ab451997902579aa69020b537c commit: 3f59b55316f4c6ab451997902579aa69020b537c branch: master author: Zackery Spytz committer: GitHub date: 2020-05-25T10:55:09+03:00 summary: bpo-35714: Reject null characters in struct format strings (GH-16928) struct.error is now raised if there is a null character in a struct format string. 
files: A Misc/NEWS.d/next/Library/2019-10-25-23-45-49.bpo-35714.fw3xb7.rst M Lib/test/test_struct.py M Modules/_struct.c diff --git a/Lib/test/test_struct.py b/Lib/test/test_struct.py index 4829fbe1b975c..b3f21ea7db49e 100644 --- a/Lib/test/test_struct.py +++ b/Lib/test/test_struct.py @@ -671,6 +671,14 @@ def __del__(self): self.assertIn(b"Exception ignored in:", stderr) self.assertIn(b"C.__del__", stderr) + def test_issue35714(self): + # Embedded null characters should not be allowed in format strings. + for s in '\0', '2\0i', b'\0': + with self.assertRaisesRegex(struct.error, + 'embedded null character'): + struct.calcsize(s) + + class UnpackIteratorTest(unittest.TestCase): """ Tests for iterative unpacking (struct.Struct.iter_unpack). diff --git a/Misc/NEWS.d/next/Library/2019-10-25-23-45-49.bpo-35714.fw3xb7.rst b/Misc/NEWS.d/next/Library/2019-10-25-23-45-49.bpo-35714.fw3xb7.rst new file mode 100644 index 0000000000000..39102065ca7b5 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-10-25-23-45-49.bpo-35714.fw3xb7.rst @@ -0,0 +1,2 @@ +:exc:`struct.error` is now raised if there is a null character in a +:mod:`struct` format string. 
diff --git a/Modules/_struct.c b/Modules/_struct.c index 13d8072f61218..5984bb6811436 100644 --- a/Modules/_struct.c +++ b/Modules/_struct.c @@ -1296,6 +1296,11 @@ prepare_s(PyStructObject *self) size_t ncodes; fmt = PyBytes_AS_STRING(self->s_format); + if (strlen(fmt) != (size_t)PyBytes_GET_SIZE(self->s_format)) { + PyErr_SetString(_structmodulestate_global->StructError, + "embedded null character"); + return -1; + } f = whichtable(&fmt); From webhook-mailer at python.org Mon May 25 04:43:15 2020 From: webhook-mailer at python.org (Christian Heimes) Date: Mon, 25 May 2020 08:43:15 -0000 Subject: [Python-checkins] bpo-40695: Limit hashlib builtin hash fallback (GH-20259) Message-ID: https://github.com/python/cpython/commit/4cc2f9348c6e899b76af811fa3bb6c60de642a28 commit: 4cc2f9348c6e899b76af811fa3bb6c60de642a28 branch: master author: Christian Heimes committer: GitHub date: 2020-05-25T01:43:10-07:00 summary: bpo-40695: Limit hashlib builtin hash fallback (GH-20259) :mod:`hashlib` no longer falls back to builtin hash implementations when OpenSSL provides a hash digest and the algorithm is blocked by security policy. Signed-off-by: Christian Heimes files: A Misc/NEWS.d/next/Library/2020-05-20-13-03-28.bpo-40695.lr4aIS.rst M Lib/hashlib.py diff --git a/Lib/hashlib.py b/Lib/hashlib.py index 8d119a4225db9..1b6e50247c181 100644 --- a/Lib/hashlib.py +++ b/Lib/hashlib.py @@ -127,8 +127,9 @@ def __get_openssl_constructor(name): # SHA3/shake are available in OpenSSL 1.1.1+ f = getattr(_hashlib, 'openssl_' + name) # Allow the C module to raise ValueError. The function will be - # defined but the hash not actually available thanks to OpenSSL. - f() + # defined but the hash not actually available. Don't fall back to + # builtin if the current security policy blocks a digest, bpo#40695. 
+ f(usedforsecurity=False) # Use the C function directly (very fast) return f except (AttributeError, ValueError): diff --git a/Misc/NEWS.d/next/Library/2020-05-20-13-03-28.bpo-40695.lr4aIS.rst b/Misc/NEWS.d/next/Library/2020-05-20-13-03-28.bpo-40695.lr4aIS.rst new file mode 100644 index 0000000000000..643779bab4948 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-20-13-03-28.bpo-40695.lr4aIS.rst @@ -0,0 +1,3 @@ +:mod:`hashlib` no longer falls back to builtin hash implementations when +OpenSSL provides a hash digest and the algorithm is blocked by security +policy. From webhook-mailer at python.org Mon May 25 04:44:56 2020 From: webhook-mailer at python.org (Christian Heimes) Date: Mon, 25 May 2020 08:44:56 -0000 Subject: [Python-checkins] bpo-40671: Prepare _hashlib for PEP 489 (GH-20180) Message-ID: https://github.com/python/cpython/commit/20c22db602bf2a51f5231433b9054290f8069b90 commit: 20c22db602bf2a51f5231433b9054290f8069b90 branch: master author: Christian Heimes committer: GitHub date: 2020-05-25T10:44:51+02:00 summary: bpo-40671: Prepare _hashlib for PEP 489 (GH-20180) files: A Misc/NEWS.d/next/Library/2020-05-18-15-26-31.bpo-40671.NeZ9Cy.rst M Modules/_hashopenssl.c diff --git a/Misc/NEWS.d/next/Library/2020-05-18-15-26-31.bpo-40671.NeZ9Cy.rst b/Misc/NEWS.d/next/Library/2020-05-18-15-26-31.bpo-40671.NeZ9Cy.rst new file mode 100644 index 0000000000000..d38b88dbf356d --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-18-15-26-31.bpo-40671.NeZ9Cy.rst @@ -0,0 +1 @@ +Prepare ``_hashlib`` for :pep:`489` and use :c:func:`PyModule_AddType`. 
diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c index 674bddc090a6f..0b2ef95a6f126 100644 --- a/Modules/_hashopenssl.c +++ b/Modules/_hashopenssl.c @@ -95,9 +95,6 @@ get_hashlib_state(PyObject *module) return (_hashlibstate *)state; } -#define _hashlibstate_global ((_hashlibstate *)PyModule_GetState(PyState_FindModule(&_hashlibmodule))) - - typedef struct { PyObject_HEAD EVP_MD_CTX *ctx; /* OpenSSL message digest context */ @@ -1763,22 +1760,30 @@ _openssl_hash_name_mapper(const EVP_MD *md, const char *from, /* Ask OpenSSL for a list of supported ciphers, filling in a Python set. */ -static PyObject* -generate_hash_name_list(void) +static int +hashlib_md_meth_names(PyObject *module) { - _InternalNameMapperState state; - state.set = PyFrozenSet_New(NULL); - if (state.set == NULL) - return NULL; - state.error = 0; + _InternalNameMapperState state = { + .set = PyFrozenSet_New(NULL), + .error = 0 + }; + if (state.set == NULL) { + return -1; + } EVP_MD_do_all(&_openssl_hash_name_mapper, &state); if (state.error) { Py_DECREF(state.set); - return NULL; + return -1; } - return state.set; + + if (PyModule_AddObject(module, "openssl_md_meth_names", state.set) < 0) { + Py_DECREF(state.set); + return -1; + } + + return 0; } /* LibreSSL doesn't support FIPS: @@ -1885,94 +1890,136 @@ hashlib_free(void *m) hashlib_clear((PyObject *)m); } +/* Py_mod_exec functions */ +static int +hashlib_openssl_legacy_init(PyObject *module) +{ +#if (OPENSSL_VERSION_NUMBER < 0x10100000L) || defined(LIBRESSL_VERSION_NUMBER) + /* Load all digest algorithms and initialize cpuid */ + OPENSSL_add_all_algorithms_noconf(); + ERR_load_crypto_strings(); +#endif + return 0; +} -static struct PyModuleDef _hashlibmodule = { - PyModuleDef_HEAD_INIT, - "_hashlib", - NULL, - sizeof(_hashlibstate), - EVP_functions, - NULL, - hashlib_traverse, - hashlib_clear, - hashlib_free -}; +static int +hashlib_init_evptype(PyObject *module) +{ + _hashlibstate *state = get_hashlib_state(module); -PyMODINIT_FUNC 
-PyInit__hashlib(void) + state->EVPtype = (PyTypeObject *)PyType_FromSpec(&EVPtype_spec); + if (state->EVPtype == NULL) { + return -1; + } + if (PyModule_AddType(module, state->EVPtype) < 0) { + return -1; + } + return 0; +} + +static int +hashlib_init_evpxoftype(PyObject *module) { - PyObject *m, *openssl_md_meth_names; - _hashlibstate *state = NULL; #ifdef PY_OPENSSL_HAS_SHAKE + _hashlibstate *state = get_hashlib_state(module); PyObject *bases; + + if (state->EVPtype == NULL) { + return -1; + } + + bases = PyTuple_Pack(1, state->EVPtype); + if (bases == NULL) { + return -1; + } + + state->EVPXOFtype = (PyTypeObject *)PyType_FromSpecWithBases( + &EVPXOFtype_spec, bases + ); + Py_DECREF(bases); + if (state->EVPXOFtype == NULL) { + return -1; + } + if (PyModule_AddType(module, state->EVPXOFtype) < 0) { + return -1; + } #endif + return 0; +} -#if (OPENSSL_VERSION_NUMBER < 0x10100000L) || defined(LIBRESSL_VERSION_NUMBER) - /* Load all digest algorithms and initialize cpuid */ - OPENSSL_add_all_algorithms_noconf(); - ERR_load_crypto_strings(); +static int +hashlib_init_hmactype(PyObject *module) +{ + _hashlibstate *state = get_hashlib_state(module); + + state->HMACtype = (PyTypeObject *)PyType_FromSpec(&HMACtype_spec); + if (state->HMACtype == NULL) { + return -1; + } + if (PyModule_AddType(module, state->HMACtype) < 0) { + return -1; + } + return 0; +} + +#if 0 +static PyModuleDef_Slot hashlib_slots[] = { + /* OpenSSL 1.0.2 and LibreSSL */ + {Py_mod_exec, hashlib_openssl_legacy_init}, + {Py_mod_exec, hashlib_init_evptype}, + {Py_mod_exec, hashlib_init_evpxoftype}, + {Py_mod_exec, hashlib_init_hmactype}, + {Py_mod_exec, hashlib_md_meth_names}, + {0, NULL} +}; #endif - m = PyState_FindModule(&_hashlibmodule); +static struct PyModuleDef _hashlibmodule = { + PyModuleDef_HEAD_INIT, + .m_name = "_hashlib", + .m_doc = "OpenSSL interface for hashlib module", + .m_size = sizeof(_hashlibstate), + .m_methods = EVP_functions, + .m_slots = NULL, + .m_traverse = hashlib_traverse, + 
.m_clear = hashlib_clear, + .m_free = hashlib_free +}; + +PyMODINIT_FUNC +PyInit__hashlib(void) +{ + PyObject *m = PyState_FindModule(&_hashlibmodule); if (m != NULL) { Py_INCREF(m); return m; } m = PyModule_Create(&_hashlibmodule); - if (m == NULL) - return NULL; - - state = get_hashlib_state(m); - - PyTypeObject *EVPtype = (PyTypeObject *)PyType_FromSpec(&EVPtype_spec); - if (EVPtype == NULL) { - Py_DECREF(m); + if (m == NULL) { return NULL; } - state->EVPtype = EVPtype; - Py_INCREF((PyObject *)state->EVPtype); - PyModule_AddObject(m, "HASH", (PyObject *)state->EVPtype); - PyTypeObject *HMACtype = (PyTypeObject *)PyType_FromSpec(&HMACtype_spec); - if (HMACtype == NULL) { + if (hashlib_openssl_legacy_init(m) < 0) { Py_DECREF(m); return NULL; } - state->HMACtype = HMACtype; - Py_INCREF((PyObject *)state->HMACtype); - PyModule_AddObject(m, "HMAC", (PyObject *)state->HMACtype); - -#ifdef PY_OPENSSL_HAS_SHAKE - bases = PyTuple_Pack(1, (PyObject *)EVPtype); - if (bases == NULL) { + if (hashlib_init_evptype(m) < 0) { Py_DECREF(m); return NULL; } - PyTypeObject *EVPXOFtype = (PyTypeObject *)PyType_FromSpecWithBases( - &EVPXOFtype_spec, bases - ); - Py_DECREF(bases); - if (EVPXOFtype == NULL) { + if (hashlib_init_evpxoftype(m) < 0) { Py_DECREF(m); return NULL; } - state->EVPXOFtype = EVPXOFtype; - - Py_INCREF((PyObject *)state->EVPXOFtype); - PyModule_AddObject(m, "HASHXOF", (PyObject *)state->EVPXOFtype); -#endif - - openssl_md_meth_names = generate_hash_name_list(); - if (openssl_md_meth_names == NULL) { + if (hashlib_init_hmactype(m) < 0) { Py_DECREF(m); return NULL; } - if (PyModule_AddObject(m, "openssl_md_meth_names", openssl_md_meth_names)) { + if (hashlib_md_meth_names(m) == -1) { Py_DECREF(m); return NULL; } - PyState_AddModule(m, &_hashlibmodule); return m; } From webhook-mailer at python.org Mon May 25 10:54:45 2020 From: webhook-mailer at python.org (Sergey Fedoseev) Date: Mon, 25 May 2020 14:54:45 -0000 Subject: [Python-checkins] bpo-34397: Remove redundant 
overflow checks in list and tuple implementation. (GH-8757) Message-ID: https://github.com/python/cpython/commit/e682b26a6bc6d3db1a44d82db09d26224e82ccb5 commit: e682b26a6bc6d3db1a44d82db09d26224e82ccb5 branch: master author: Sergey Fedoseev committer: GitHub date: 2020-05-25T07:54:40-07:00 summary: bpo-34397: Remove redundant overflow checks in list and tuple implementation. (GH-8757) files: M Objects/listobject.c M Objects/tupleobject.c diff --git a/Objects/listobject.c b/Objects/listobject.c index 37fadca129ac0..30d2620753744 100644 --- a/Objects/listobject.c +++ b/Objects/listobject.c @@ -261,12 +261,8 @@ ins1(PyListObject *self, Py_ssize_t where, PyObject *v) PyErr_BadInternalCall(); return -1; } - if (n == PY_SSIZE_T_MAX) { - PyErr_SetString(PyExc_OverflowError, - "cannot add more objects to list"); - return -1; - } + assert((size_t)n + 1 < PY_SSIZE_T_MAX); if (list_resize(self, n+1) < 0) return -1; @@ -301,12 +297,7 @@ app1(PyListObject *self, PyObject *v) Py_ssize_t n = PyList_GET_SIZE(self); assert (v != NULL); - if (n == PY_SSIZE_T_MAX) { - PyErr_SetString(PyExc_OverflowError, - "cannot add more objects to list"); - return -1; - } - + assert((size_t)n + 1 < PY_SSIZE_T_MAX); if (list_resize(self, n+1) < 0) return -1; @@ -503,8 +494,7 @@ list_concat(PyListObject *a, PyObject *bb) return NULL; } #define b ((PyListObject *)bb) - if (Py_SIZE(a) > PY_SSIZE_T_MAX - Py_SIZE(b)) - return PyErr_NoMemory(); + assert((size_t)Py_SIZE(a) + (size_t)Py_SIZE(b) < PY_SSIZE_T_MAX); size = Py_SIZE(a) + Py_SIZE(b); np = (PyListObject *) list_new_prealloc(size); if (np == NULL) { diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c index c0b59c009a2e9..14534632dfea7 100644 --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -486,8 +486,7 @@ tupleconcat(PyTupleObject *a, PyObject *bb) Py_INCREF(a); return (PyObject *)a; } - if (Py_SIZE(a) > PY_SSIZE_T_MAX - Py_SIZE(b)) - return PyErr_NoMemory(); + assert((size_t)Py_SIZE(a) + (size_t)Py_SIZE(b) < PY_SSIZE_T_MAX); 
size = Py_SIZE(a) + Py_SIZE(b); if (size == 0) { return PyTuple_New(0); From webhook-mailer at python.org Mon May 25 11:11:47 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 25 May 2020 15:11:47 -0000 Subject: [Python-checkins] bpo-38972: Fix typos in PowerShell Execution Policies links (GH-20383) Message-ID: https://github.com/python/cpython/commit/331b2dfadb2a5dd990145c043d006166e568af7b commit: 331b2dfadb2a5dd990145c043d006166e568af7b branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-25T08:11:39-07:00 summary: bpo-38972: Fix typos in PowerShell Execution Policies links (GH-20383) (cherry picked from commit ef16958d17e83723334a51428f410f726d6492a7) Co-authored-by: Miro Hrončok files: M Doc/using/venv-create.inc M Lib/venv/scripts/common/Activate.ps1 diff --git a/Doc/using/venv-create.inc b/Doc/using/venv-create.inc index 99b2a19a18d06..c81aaf15d885a 100644 --- a/Doc/using/venv-create.inc +++ b/Doc/using/venv-create.inc @@ -86,7 +86,7 @@ The command, if run with ``-h``, will show the available options:: PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser See `About Execution Policies - `_ + `_ for more information.
The created ``pyvenv.cfg`` file also includes the diff --git a/Lib/venv/scripts/common/Activate.ps1 b/Lib/venv/scripts/common/Activate.ps1 index b8245b1bbe5c8..a3bc6fb1f05bf 100644 --- a/Lib/venv/scripts/common/Activate.ps1 +++ b/Lib/venv/scripts/common/Activate.ps1 @@ -45,7 +45,7 @@ command: PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser For more information on Execution Policies: -ttps:/go.microsoft.com/fwlink/?LinkID=135170 +https://go.microsoft.com/fwlink/?LinkID=135170 #> Param( From webhook-mailer at python.org Mon May 25 12:52:59 2020 From: webhook-mailer at python.org (Dong-hee Na) Date: Mon, 25 May 2020 16:52:59 -0000 Subject: [Python-checkins] bpo-39573: Convert Py_TYPE() to a static inline function (GH-20290) Message-ID: https://github.com/python/cpython/commit/ad3252bad905d41635bcbb4b76db30d570cf0087 commit: ad3252bad905d41635bcbb4b76db30d570cf0087 branch: master author: Dong-hee Na committer: GitHub date: 2020-05-26T01:52:54+09:00 summary: bpo-39573: Convert Py_TYPE() to a static inline function (GH-20290) files: A Misc/NEWS.d/next/Core and Builtins/2020-05-22-00-34-34.bpo-39573.QO2QHj.rst M Doc/c-api/structures.rst M Doc/whatsnew/3.10.rst M Include/object.h diff --git a/Doc/c-api/structures.rst b/Doc/c-api/structures.rst index 634e971952e8e..5535f42ac120a 100644 --- a/Doc/c-api/structures.rst +++ b/Doc/c-api/structures.rst @@ -62,12 +62,15 @@ the definition of all other Python objects. See documentation of :c:type:`PyVarObject` above. -.. c:macro:: Py_TYPE(o) +.. c:function:: PyTypeObject* Py_TYPE(const PyObject *o) - This macro is used to access the :attr:`ob_type` member of a Python object. - It expands to:: + Get the type of the Python object *o*. + + Return a borrowed reference. - (((PyObject*)(o))->ob_type) + .. versionchanged:: 3.10 + :c:func:`Py_TYPE()` is changed to the inline static function. + Use :c:func:`Py_SET_TYPE()` to set an object type. .. 
c:function:: int Py_IS_TYPE(PyObject *o, PyTypeObject *type) diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst index e650f9405a811..98a231f80aaf2 100644 --- a/Doc/whatsnew/3.10.rst +++ b/Doc/whatsnew/3.10.rst @@ -97,21 +97,14 @@ Optimizations ============= -Build and C API Changes -======================= - - - Deprecated ========== - Removed ======= - Porting to Python 3.10 ====================== @@ -119,3 +112,26 @@ This section lists previously described changes and other bugfixes that may require changes to your code. + +Build Changes +============= + + +C API Changes +============= + +New Features +------------ + + +Porting to Python 3.10 +---------------------- + +* Since :c:func:`Py_TYPE()` is changed to the inline static function, + ``Py_TYPE(obj) = new_type`` must be replaced with ``Py_SET_TYPE(obj, new_type)``: + see :c:func:`Py_SET_TYPE()` (available since Python 3.9). + (Contributed by Dong-hee Na in :issue:`39573`.) + + +Removed +------- diff --git a/Include/object.h b/Include/object.h index 514d934196f57..5ad05699bb45c 100644 --- a/Include/object.h +++ b/Include/object.h @@ -121,9 +121,13 @@ typedef struct { #define _PyVarObject_CAST(op) ((PyVarObject*)(op)) #define Py_REFCNT(ob) (_PyObject_CAST(ob)->ob_refcnt) -#define Py_TYPE(ob) (_PyObject_CAST(ob)->ob_type) #define Py_SIZE(ob) (_PyVarObject_CAST(ob)->ob_size) +static inline PyTypeObject* _Py_TYPE(const PyObject *ob) { + return ob->ob_type; +} +#define Py_TYPE(ob) _Py_TYPE(_PyObject_CAST_CONST(ob)) + static inline int _Py_IS_TYPE(const PyObject *ob, const PyTypeObject *type) { return ob->ob_type == type; } diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-22-00-34-34.bpo-39573.QO2QHj.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-22-00-34-34.bpo-39573.QO2QHj.rst new file mode 100644 index 0000000000000..2430030304267 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-22-00-34-34.bpo-39573.QO2QHj.rst @@ -0,0 +1,2 @@ +:c:func:`Py_TYPE()` is changed to the inline static 
function. Patch by +Dong-hee Na. From webhook-mailer at python.org Mon May 25 13:25:36 2020 From: webhook-mailer at python.org (Dong-hee Na) Date: Mon, 25 May 2020 17:25:36 -0000 Subject: [Python-checkins] bpo-39573: Fix buildbot failure for tupleobject.c (GH-20391) Message-ID: https://github.com/python/cpython/commit/7d847e29d76b178c2db66b180065771b4d90c78f commit: 7d847e29d76b178c2db66b180065771b4d90c78f branch: master author: Dong-hee Na committer: GitHub date: 2020-05-26T02:25:28+09:00 summary: bpo-39573: Fix buildbot failure for tupleobject.c (GH-20391) files: M Objects/tupleobject.c diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c index 14534632dfea7..e4c0c91cfe819 100644 --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -83,7 +83,7 @@ tuple_alloc(Py_ssize_t size) /* Inline PyObject_InitVar */ #ifdef Py_TRACE_REFS Py_SIZE(op) = size; - Py_TYPE(op) = &PyTuple_Type; + Py_SET_TYPE(op, &PyTuple_Type); #endif _Py_NewReference((PyObject *)op); } From webhook-mailer at python.org Mon May 25 13:38:55 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Mon, 25 May 2020 17:38:55 -0000 Subject: [Python-checkins] bpo-40750: Support -d flag in the new parser (GH-20340) Message-ID: https://github.com/python/cpython/commit/800a35c623bbcdb5793c7d7a4974524286311479 commit: 800a35c623bbcdb5793c7d7a4974524286311479 branch: master author: Pablo Galindo committer: GitHub date: 2020-05-25T18:38:45+01:00 summary: bpo-40750: Support -d flag in the new parser (GH-20340) files: A Misc/NEWS.d/next/Core and Builtins/2020-05-24-02-42-26.bpo-40750.ZmO9Ev.rst M Parser/pegen/parse.c M Parser/pegen/pegen.c M Parser/pegen/pegen.h M Tools/peg_generator/pegen/c_generator.py M Tools/peg_generator/pegen/testutil.py diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-24-02-42-26.bpo-40750.ZmO9Ev.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-24-02-42-26.bpo-40750.ZmO9Ev.rst new file mode 100644 index 0000000000000..4032b80169035 --- /dev/null +++ 
b/Misc/NEWS.d/next/Core and Builtins/2020-05-24-02-42-26.bpo-40750.ZmO9Ev.rst @@ -0,0 +1 @@ +Support the "-d" debug flag in the new PEG parser. Patch by Pablo Galindo diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index 5dff77abc9fd7..2b735472ad620 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -1,5 +1,12 @@ // @generated by pegen.py from ./Grammar/python.gram #include "pegen.h" + +#ifdef Py_DEBUG +extern int Py_DebugFlag; +#define D(x) if (Py_DebugFlag) x; +#else +#define D(x) +#endif static const int n_keyword_lists = 15; static KeywordToken *reserved_keywords[] = { NULL, @@ -683,15 +690,19 @@ static void *_tmp_148_rule(Parser *p); static mod_ty file_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } mod_ty _res = NULL; int _mark = p->mark; { // statements? $ if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> file[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "statements? $")); void *a; Token * endmarker_var; if ( @@ -700,17 +711,22 @@ file_rule(Parser *p) (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' ) { + D(fprintf(stderr, "%*c+ file[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "statements? $")); _res = _PyPegen_make_module ( p , a ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s file[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "statements? 
$")); } _res = NULL; done: + D(p->level--); return _res; } @@ -718,31 +734,40 @@ file_rule(Parser *p) static mod_ty interactive_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } mod_ty _res = NULL; int _mark = p->mark; { // statement_newline if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> interactive[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "statement_newline")); asdl_seq* a; if ( (a = statement_newline_rule(p)) // statement_newline ) { + D(fprintf(stderr, "%*c+ interactive[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "statement_newline")); _res = Interactive ( a , p -> arena ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s interactive[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "statement_newline")); } _res = NULL; done: + D(p->level--); return _res; } @@ -750,15 +775,19 @@ interactive_rule(Parser *p) static mod_ty eval_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } mod_ty _res = NULL; int _mark = p->mark; { // expressions NEWLINE* $ if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> eval[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions NEWLINE* $")); asdl_seq * _loop0_1_var; expr_ty a; Token * endmarker_var; @@ -770,17 +799,22 @@ eval_rule(Parser *p) (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' ) { + D(fprintf(stderr, "%*c+ eval[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions NEWLINE* $")); _res = Expression ( a , p -> arena ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s eval[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expressions NEWLINE* $")); } _res = NULL; done: + D(p->level--); return _res; } @@ -788,15 +822,19 @@ eval_rule(Parser *p) static mod_ty func_type_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } mod_ty _res = NULL; int _mark = p->mark; { // '(' type_expressions? ')' '->' expression NEWLINE* $ if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> func_type[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' type_expressions? ')' '->' expression NEWLINE* $")); Token * _literal; Token * _literal_1; Token * _literal_2; @@ -820,17 +858,22 @@ func_type_rule(Parser *p) (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' ) { + D(fprintf(stderr, "%*c+ func_type[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' type_expressions? ')' '->' expression NEWLINE* $")); _res = FunctionType ( a , b , p -> arena ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s func_type[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' type_expressions? 
')' '->' expression NEWLINE* $")); } _res = NULL; done: + D(p->level--); return _res; } @@ -838,27 +881,35 @@ func_type_rule(Parser *p) static expr_ty fstring_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; { // star_expressions if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> fstring[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); expr_ty star_expressions_var; if ( (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { + D(fprintf(stderr, "%*c+ fstring[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); _res = star_expressions_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s fstring[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); } _res = NULL; done: + D(p->level--); return _res; } @@ -873,15 +924,19 @@ fstring_rule(Parser *p) static asdl_seq* type_expressions_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq* _res = NULL; int _mark = p->mark; { // ','.expression+ ',' '*' expression ',' '**' expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> type_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.expression+ ',' '*' expression ',' '**' expression")); Token * _literal; Token * _literal_1; Token * _literal_2; @@ -905,19 +960,25 @@ type_expressions_rule(Parser *p) (c = expression_rule(p)) // expression ) { + D(fprintf(stderr, "%*c+ type_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.expression+ ',' '*' expression ',' '**' expression")); _res = _PyPegen_seq_append_to_end ( p , CHECK ( _PyPegen_seq_append_to_end ( p , a , b ) ) , c ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + 
D(fprintf(stderr, "%*c%s type_expressions[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.expression+ ',' '*' expression ',' '**' expression")); } { // ','.expression+ ',' '*' expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> type_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.expression+ ',' '*' expression")); Token * _literal; Token * _literal_1; asdl_seq * a; @@ -932,19 +993,25 @@ type_expressions_rule(Parser *p) (b = expression_rule(p)) // expression ) { + D(fprintf(stderr, "%*c+ type_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.expression+ ',' '*' expression")); _res = _PyPegen_seq_append_to_end ( p , a , b ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s type_expressions[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.expression+ ',' '*' expression")); } { // ','.expression+ ',' '**' expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> type_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.expression+ ',' '**' expression")); Token * _literal; Token * _literal_1; asdl_seq * a; @@ -959,19 +1026,25 @@ type_expressions_rule(Parser *p) (b = expression_rule(p)) // expression ) { + D(fprintf(stderr, "%*c+ type_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.expression+ ',' '**' expression")); _res = _PyPegen_seq_append_to_end ( p , a , b ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s type_expressions[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','.expression+ ',' '**' expression")); } { // '*' expression ',' '**' expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> type_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' expression ',' '**' expression")); Token * _literal; Token * _literal_1; Token * _literal_2; @@ -989,19 +1062,25 @@ type_expressions_rule(Parser *p) (b = expression_rule(p)) // expression ) { + D(fprintf(stderr, "%*c+ type_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' expression ',' '**' expression")); _res = _PyPegen_seq_append_to_end ( p , CHECK ( _PyPegen_singleton_seq ( p , a ) ) , b ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s type_expressions[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' expression ',' '**' expression")); } { // '*' expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> type_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' expression")); Token * _literal; expr_ty a; if ( @@ -1010,19 +1089,25 @@ type_expressions_rule(Parser *p) (a = expression_rule(p)) // expression ) { + D(fprintf(stderr, "%*c+ type_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' expression")); _res = _PyPegen_singleton_seq ( p , a ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s type_expressions[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'*' expression")); } { // '**' expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> type_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**' expression")); Token * _literal; expr_ty a; if ( @@ -1031,31 +1116,41 @@ type_expressions_rule(Parser *p) (a = expression_rule(p)) // expression ) { + D(fprintf(stderr, "%*c+ type_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**' expression")); _res = _PyPegen_singleton_seq ( p , a ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s type_expressions[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**' expression")); } { // ','.expression+ if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> type_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.expression+")); asdl_seq * _gather_9_var; if ( (_gather_9_var = _gather_9_rule(p)) // ','.expression+ ) { + D(fprintf(stderr, "%*c+ type_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.expression+")); _res = _gather_9_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s type_expressions[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','.expression+")); } _res = NULL; done: + D(p->level--); return _res; } @@ -1063,31 +1158,40 @@ type_expressions_rule(Parser *p) static asdl_seq* statements_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq* _res = NULL; int _mark = p->mark; { // statement+ if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> statements[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "statement+")); asdl_seq * a; if ( (a = _loop1_11_rule(p)) // statement+ ) { + D(fprintf(stderr, "%*c+ statements[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "statement+")); _res = _PyPegen_seq_flatten ( p , a ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s statements[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "statement+")); } _res = NULL; done: + D(p->level--); return _res; } @@ -1095,45 +1199,59 @@ statements_rule(Parser *p) static asdl_seq* statement_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq* _res = NULL; int _mark = p->mark; { // compound_stmt if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> statement[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "compound_stmt")); stmt_ty a; if ( (a = compound_stmt_rule(p)) // compound_stmt ) { + D(fprintf(stderr, "%*c+ statement[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "compound_stmt")); _res = _PyPegen_singleton_seq ( p , a ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s statement[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "compound_stmt")); } { // simple_stmt if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> statement[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "simple_stmt")); asdl_seq* simple_stmt_var; if ( (simple_stmt_var = simple_stmt_rule(p)) // simple_stmt ) { + D(fprintf(stderr, "%*c+ statement[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "simple_stmt")); _res = simple_stmt_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s statement[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "simple_stmt")); } _res = NULL; done: + D(p->level--); return _res; } @@ -1141,13 +1259,16 @@ statement_rule(Parser *p) static asdl_seq* statement_newline_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq* _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -1156,8 +1277,10 @@ statement_newline_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // compound_stmt NEWLINE if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> statement_newline[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "compound_stmt NEWLINE")); stmt_ty a; Token * newline_var; if ( @@ -1166,40 +1289,53 @@ statement_newline_rule(Parser *p) (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { + D(fprintf(stderr, "%*c+ statement_newline[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "compound_stmt NEWLINE")); _res = _PyPegen_singleton_seq ( p , a ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s statement_newline[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "compound_stmt NEWLINE")); } { // simple_stmt if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> statement_newline[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "simple_stmt")); asdl_seq* simple_stmt_var; if ( (simple_stmt_var = simple_stmt_rule(p)) // simple_stmt ) { + D(fprintf(stderr, "%*c+ statement_newline[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "simple_stmt")); _res = simple_stmt_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s statement_newline[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "simple_stmt")); } { // NEWLINE if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> statement_newline[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE")); Token * newline_var; if ( (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { + D(fprintf(stderr, "%*c+ statement_newline[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -1209,32 +1345,42 @@ statement_newline_rule(Parser *p) _res = _PyPegen_singleton_seq ( p , CHECK ( _Py_Pass ( EXTRA ) ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s statement_newline[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NEWLINE")); } { // $ if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> statement_newline[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "$")); Token * endmarker_var; if ( (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' ) { + D(fprintf(stderr, "%*c+ statement_newline[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "$")); _res = _PyPegen_interactive_exit ( p ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s statement_newline[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "$")); } _res = NULL; done: + D(p->level--); return _res; } @@ -1242,15 +1388,19 @@ statement_newline_rule(Parser *p) static asdl_seq* simple_stmt_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq* _res = NULL; int _mark = p->mark; { // small_stmt !';' NEWLINE if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "small_stmt !';' NEWLINE")); stmt_ty a; Token * newline_var; if ( @@ -1261,19 +1411,25 @@ simple_stmt_rule(Parser *p) (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { + D(fprintf(stderr, "%*c+ simple_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "small_stmt !';' NEWLINE")); _res = _PyPegen_singleton_seq ( p , a ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s simple_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "small_stmt !';' NEWLINE")); } { // ';'.small_stmt+ ';'? 
NEWLINE if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "';'.small_stmt+ ';'? NEWLINE")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; @@ -1286,17 +1442,22 @@ simple_stmt_rule(Parser *p) (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { + D(fprintf(stderr, "%*c+ simple_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "';'.small_stmt+ ';'? NEWLINE")); _res = a; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s simple_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "';'.small_stmt+ ';'? NEWLINE")); } _res = NULL; done: + D(p->level--); return _res; } @@ -1317,15 +1478,20 @@ simple_stmt_rule(Parser *p) static stmt_ty small_stmt_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } stmt_ty _res = NULL; - if (_PyPegen_is_memoized(p, small_stmt_type, &_res)) + if (_PyPegen_is_memoized(p, small_stmt_type, &_res)) { + D(p->level--); return _res; + } int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -1334,29 +1500,38 @@ small_stmt_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // assignment if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "assignment")); stmt_ty assignment_var; if ( (assignment_var = assignment_rule(p)) // assignment ) { + D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "assignment")); _res = assignment_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', + 
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "assignment")); } { // star_expressions if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); expr_ty e; if ( (e = star_expressions_rule(p)) // star_expressions ) { + D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -1366,16 +1541,21 @@ small_stmt_rule(Parser *p) _res = _Py_Expr ( e , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); } { // &'return' return_stmt if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'return' return_stmt")); stmt_ty return_stmt_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 500) // token='return' @@ -1383,15 +1563,20 @@ small_stmt_rule(Parser *p) (return_stmt_var = return_stmt_rule(p)) // return_stmt ) { + D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'return' return_stmt")); _res = return_stmt_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "&'return' return_stmt")); } { // &('import' | 'from') import_stmt if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('import' | 'from') import_stmt")); stmt_ty import_stmt_var; if ( _PyPegen_lookahead(1, _tmp_14_rule, p) @@ -1399,15 +1584,20 @@ small_stmt_rule(Parser *p) (import_stmt_var = import_stmt_rule(p)) // import_stmt ) { + D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&('import' | 'from') import_stmt")); _res = import_stmt_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&('import' | 'from') import_stmt")); } { // &'raise' raise_stmt if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'raise' raise_stmt")); stmt_ty raise_stmt_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 501) // token='raise' @@ -1415,22 +1605,29 @@ small_stmt_rule(Parser *p) (raise_stmt_var = raise_stmt_rule(p)) // raise_stmt ) { + D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'raise' raise_stmt")); _res = raise_stmt_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "&'raise' raise_stmt")); } { // 'pass' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'pass'")); Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 502)) // token='pass' ) { + D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'pass'")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -1440,16 +1637,21 @@ small_stmt_rule(Parser *p) _res = _Py_Pass ( EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'pass'")); } { // &'del' del_stmt if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'del' del_stmt")); stmt_ty del_stmt_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 503) // token='del' @@ -1457,15 +1659,20 @@ small_stmt_rule(Parser *p) (del_stmt_var = del_stmt_rule(p)) // del_stmt ) { + D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'del' del_stmt")); _res = del_stmt_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "&'del' del_stmt")); } { // &'yield' yield_stmt if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'yield' yield_stmt")); stmt_ty yield_stmt_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 504) // token='yield' @@ -1473,15 +1680,20 @@ small_stmt_rule(Parser *p) (yield_stmt_var = yield_stmt_rule(p)) // yield_stmt ) { + D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'yield' yield_stmt")); _res = yield_stmt_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&'yield' yield_stmt")); } { // &'assert' assert_stmt if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'assert' assert_stmt")); stmt_ty assert_stmt_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 505) // token='assert' @@ -1489,22 +1701,29 @@ small_stmt_rule(Parser *p) (assert_stmt_var = assert_stmt_rule(p)) // assert_stmt ) { + D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'assert' assert_stmt")); _res = assert_stmt_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "&'assert' assert_stmt")); } { // 'break' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'break'")); Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 506)) // token='break' ) { + D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'break'")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -1514,23 +1733,30 @@ small_stmt_rule(Parser *p) _res = _Py_Break ( EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'break'")); } { // 'continue' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'continue'")); Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 507)) // token='continue' ) { + D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'continue'")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -1540,16 +1766,21 @@ small_stmt_rule(Parser *p) _res = _Py_Continue ( EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'continue'")); } { // &'global' global_stmt if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'global' global_stmt")); stmt_ty global_stmt_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 508) // token='global' @@ -1557,15 +1788,20 @@ small_stmt_rule(Parser *p) (global_stmt_var = global_stmt_rule(p)) // global_stmt ) { + D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'global' global_stmt")); _res = global_stmt_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&'global' global_stmt")); } { // &'nonlocal' nonlocal_stmt if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'nonlocal' nonlocal_stmt")); stmt_ty nonlocal_stmt_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 509) // token='nonlocal' @@ -1573,14 +1809,18 @@ small_stmt_rule(Parser *p) (nonlocal_stmt_var = nonlocal_stmt_rule(p)) // nonlocal_stmt ) { + D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'nonlocal' nonlocal_stmt")); _res = nonlocal_stmt_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "&'nonlocal' nonlocal_stmt")); } _res = NULL; done: _PyPegen_insert_memo(p, _mark, small_stmt_type, _res); + D(p->level--); return _res; } @@ -1595,15 +1835,19 @@ small_stmt_rule(Parser *p) static stmt_ty compound_stmt_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } stmt_ty _res = NULL; int _mark = p->mark; { // &('def' | '@' | ASYNC) function_def if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('def' | '@' | ASYNC) function_def")); stmt_ty function_def_var; if ( _PyPegen_lookahead(1, _tmp_15_rule, p) @@ -1611,15 +1855,20 @@ compound_stmt_rule(Parser *p) (function_def_var = function_def_rule(p)) // function_def ) { + D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&('def' | '@' | ASYNC) function_def")); _res = function_def_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s compound_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&('def' | '@' | ASYNC) function_def")); } { // &'if' if_stmt if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'if' if_stmt")); stmt_ty if_stmt_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 510) // token='if' @@ -1627,15 +1876,20 @@ compound_stmt_rule(Parser *p) (if_stmt_var = if_stmt_rule(p)) // if_stmt ) { + D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'if' if_stmt")); _res = if_stmt_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s compound_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "&'if' if_stmt")); } { // &('class' | '@') class_def if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('class' | '@') class_def")); stmt_ty class_def_var; if ( _PyPegen_lookahead(1, _tmp_16_rule, p) @@ -1643,15 +1897,20 @@ compound_stmt_rule(Parser *p) (class_def_var = class_def_rule(p)) // class_def ) { + D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&('class' | '@') class_def")); _res = class_def_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s compound_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&('class' | '@') class_def")); } { // &('with' | ASYNC) with_stmt if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('with' | ASYNC) with_stmt")); stmt_ty with_stmt_var; if ( _PyPegen_lookahead(1, _tmp_17_rule, p) @@ -1659,15 +1918,20 @@ compound_stmt_rule(Parser *p) (with_stmt_var = with_stmt_rule(p)) // with_stmt ) { + D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&('with' | ASYNC) with_stmt")); _res = with_stmt_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s compound_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "&('with' | ASYNC) with_stmt")); } { // &('for' | ASYNC) for_stmt if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('for' | ASYNC) for_stmt")); stmt_ty for_stmt_var; if ( _PyPegen_lookahead(1, _tmp_18_rule, p) @@ -1675,15 +1939,20 @@ compound_stmt_rule(Parser *p) (for_stmt_var = for_stmt_rule(p)) // for_stmt ) { + D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&('for' | ASYNC) for_stmt")); _res = for_stmt_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s compound_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&('for' | ASYNC) for_stmt")); } { // &'try' try_stmt if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'try' try_stmt")); stmt_ty try_stmt_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 511) // token='try' @@ -1691,15 +1960,20 @@ compound_stmt_rule(Parser *p) (try_stmt_var = try_stmt_rule(p)) // try_stmt ) { + D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'try' try_stmt")); _res = try_stmt_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s compound_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "&'try' try_stmt")); } { // &'while' while_stmt if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'while' while_stmt")); stmt_ty while_stmt_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 512) // token='while' @@ -1707,13 +1981,17 @@ compound_stmt_rule(Parser *p) (while_stmt_var = while_stmt_rule(p)) // while_stmt ) { + D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'while' while_stmt")); _res = while_stmt_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s compound_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&'while' while_stmt")); } _res = NULL; done: + D(p->level--); return _res; } @@ -1726,13 +2004,16 @@ compound_stmt_rule(Parser *p) static stmt_ty assignment_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } stmt_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -1741,8 +2022,10 @@ assignment_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME ':' expression ['=' annotated_rhs] if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME ':' expression ['=' annotated_rhs]")); Token * _literal; expr_ty a; expr_ty b; @@ -1757,8 +2040,10 @@ assignment_rule(Parser *p) (c = _tmp_19_rule(p), 1) // ['=' annotated_rhs] ) { + D(fprintf(stderr, "%*c+ assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME ':' expression ['=' annotated_rhs]")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -1768,16 
+2053,21 @@ assignment_rule(Parser *p) _res = CHECK_VERSION ( 6 , "Variable annotation syntax is" , _Py_AnnAssign ( CHECK ( _PyPegen_set_expr_context ( p , a , Store ) ) , b , c , 1 , EXTRA ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s assignment[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME ':' expression ['=' annotated_rhs]")); } { // ('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs] if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs]")); Token * _literal; void *a; expr_ty b; @@ -1792,8 +2082,10 @@ assignment_rule(Parser *p) (c = _tmp_21_rule(p), 1) // ['=' annotated_rhs] ) { + D(fprintf(stderr, "%*c+ assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs]")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -1803,16 +2095,21 @@ assignment_rule(Parser *p) _res = CHECK_VERSION ( 6 , "Variable annotations syntax is" , _Py_AnnAssign ( a , b , c , 0 , EXTRA ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s assignment[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs]")); } { // ((star_targets '='))+ (yield_expr | star_expressions) TYPE_COMMENT? 
if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((star_targets '='))+ (yield_expr | star_expressions) TYPE_COMMENT?")); asdl_seq * a; void *b; void *tc; @@ -1824,8 +2121,10 @@ assignment_rule(Parser *p) (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? ) { + D(fprintf(stderr, "%*c+ assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "((star_targets '='))+ (yield_expr | star_expressions) TYPE_COMMENT?")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -1835,16 +2134,21 @@ assignment_rule(Parser *p) _res = _Py_Assign ( a , b , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s assignment[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "((star_targets '='))+ (yield_expr | star_expressions) TYPE_COMMENT?")); } { // single_target augassign (yield_expr | star_expressions) if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "single_target augassign (yield_expr | star_expressions)")); expr_ty a; AugOperator* b; void *c; @@ -1856,8 +2160,10 @@ assignment_rule(Parser *p) (c = _tmp_24_rule(p)) // yield_expr | star_expressions ) { + D(fprintf(stderr, "%*c+ assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "single_target augassign (yield_expr | star_expressions)")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -1867,28 +2173,37 @@ assignment_rule(Parser *p) _res = _Py_AugAssign ( a , b -> kind , c , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s assignment[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "single_target augassign (yield_expr | star_expressions)")); } { // invalid_assignment if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_assignment")); void *invalid_assignment_var; if ( (invalid_assignment_var = invalid_assignment_rule(p)) // invalid_assignment ) { + D(fprintf(stderr, "%*c+ assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_assignment")); _res = invalid_assignment_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s assignment[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "invalid_assignment")); } _res = NULL; done: + D(p->level--); return _res; } @@ -1909,247 +2224,328 @@ assignment_rule(Parser *p) static AugOperator* augassign_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } AugOperator* _res = NULL; int _mark = p->mark; { // '+=' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'+='")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 36)) // token='+=' ) { + D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'+='")); _res = _PyPegen_augoperator ( p , Add ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'+='")); } { // '-=' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'-='")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 37)) // token='-=' ) { + D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'-='")); _res = _PyPegen_augoperator ( p , Sub ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'-='")); } { // '*=' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*='")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 38)) // token='*=' ) { + D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*='")); _res = _PyPegen_augoperator ( p , Mult ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*='")); } { // '@=' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@='")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 50)) // token='@=' ) { + D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@='")); _res = CHECK_VERSION ( 5 , "The '@' operator is" , _PyPegen_augoperator ( p , MatMult ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'@='")); } { // '/=' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/='")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 39)) // token='/=' ) { + D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/='")); _res = _PyPegen_augoperator ( p , Div ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'/='")); } { // '%=' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'%='")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 40)) // token='%=' ) { + D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'%='")); _res = _PyPegen_augoperator ( p , Mod ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'%='")); } { // '&=' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'&='")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 41)) // token='&=' ) { + D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'&='")); _res = _PyPegen_augoperator ( p , BitAnd ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'&='")); } { // '|=' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'|='")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 42)) // token='|=' ) { + D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'|='")); _res = _PyPegen_augoperator ( p , BitOr ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'|='")); } { // '^=' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'^='")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 43)) // token='^=' ) { + D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'^='")); _res = _PyPegen_augoperator ( p , BitXor ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'^='")); } { // '<<=' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'<<='")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 44)) // token='<<=' ) { + D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'<<='")); _res = _PyPegen_augoperator ( p , LShift ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'<<='")); } { // '>>=' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'>>='")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 45)) // token='>>=' ) { + D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'>>='")); _res = _PyPegen_augoperator ( p , RShift ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'>>='")); } { // '**=' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**='")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 46)) // token='**=' ) { + D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**='")); _res = _PyPegen_augoperator ( p , Pow ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**='")); } { // '//=' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'//='")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 48)) // token='//=' ) { + D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'//='")); _res = _PyPegen_augoperator ( p , FloorDiv ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'//='")); } _res = NULL; done: + D(p->level--); return _res; } @@ -2157,13 +2553,16 @@ augassign_rule(Parser *p) static stmt_ty global_stmt_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } stmt_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -2172,8 +2571,10 @@ global_stmt_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'global' ','.NAME+ if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> global_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'global' ','.NAME+")); Token * _keyword; asdl_seq * a; if ( @@ -2182,8 +2583,10 @@ global_stmt_rule(Parser *p) (a = _gather_25_rule(p)) // ','.NAME+ ) { + D(fprintf(stderr, "%*c+ global_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'global' ','.NAME+")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -2193,14 +2596,18 @@ global_stmt_rule(Parser *p) _res = _Py_Global ( CHECK ( _PyPegen_map_names_to_ids ( p , a ) ) , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s global_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'global' ','.NAME+")); } _res = NULL; done: + D(p->level--); return _res; } @@ -2208,13 +2615,16 @@ global_stmt_rule(Parser *p) static stmt_ty nonlocal_stmt_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } stmt_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -2223,8 +2633,10 @@ nonlocal_stmt_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'nonlocal' ','.NAME+ if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> nonlocal_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'nonlocal' ','.NAME+")); Token * _keyword; asdl_seq * a; if ( @@ -2233,8 +2645,10 @@ nonlocal_stmt_rule(Parser *p) (a = _gather_27_rule(p)) // ','.NAME+ ) { + D(fprintf(stderr, "%*c+ nonlocal_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'nonlocal' ','.NAME+")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -2244,14 +2658,18 @@ nonlocal_stmt_rule(Parser *p) _res = _Py_Nonlocal ( CHECK ( _PyPegen_map_names_to_ids ( p , a ) ) , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s nonlocal_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'nonlocal' ','.NAME+")); } _res = NULL; done: + D(p->level--); return _res; } @@ -2259,13 +2677,16 @@ nonlocal_stmt_rule(Parser *p) static stmt_ty yield_stmt_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } stmt_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -2274,15 +2695,19 @@ yield_stmt_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // yield_expr if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> yield_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); expr_ty y; if ( (y = yield_expr_rule(p)) // yield_expr ) { + D(fprintf(stderr, "%*c+ yield_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -2292,14 +2717,18 @@ yield_stmt_rule(Parser *p) _res = _Py_Expr ( y , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s yield_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "yield_expr")); } _res = NULL; done: + D(p->level--); return _res; } @@ -2307,13 +2736,16 @@ yield_stmt_rule(Parser *p) static stmt_ty assert_stmt_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } stmt_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -2322,8 +2754,10 @@ assert_stmt_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'assert' expression [',' expression] if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> assert_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'assert' expression [',' expression]")); Token * _keyword; expr_ty a; void *b; @@ -2335,8 +2769,10 @@ assert_stmt_rule(Parser *p) (b = _tmp_29_rule(p), 1) // [',' expression] ) { + D(fprintf(stderr, "%*c+ assert_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'assert' expression [',' expression]")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -2346,14 +2782,18 @@ assert_stmt_rule(Parser *p) _res = _Py_Assert ( a , b , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s assert_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'assert' expression [',' expression]")); } _res = NULL; done: + D(p->level--); return _res; } @@ -2361,13 +2801,16 @@ assert_stmt_rule(Parser *p) static stmt_ty del_stmt_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } stmt_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -2376,8 +2819,10 @@ del_stmt_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'del' del_targets if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> del_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'del' del_targets")); Token * _keyword; asdl_seq* a; if ( @@ -2386,8 +2831,10 @@ del_stmt_rule(Parser *p) (a = del_targets_rule(p)) // del_targets ) { + D(fprintf(stderr, "%*c+ del_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'del' del_targets")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -2397,14 +2844,18 @@ del_stmt_rule(Parser *p) _res = _Py_Delete ( a , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s del_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'del' del_targets")); } _res = NULL; done: + D(p->level--); return _res; } @@ -2412,41 +2863,54 @@ del_stmt_rule(Parser *p) static stmt_ty import_stmt_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } stmt_ty _res = NULL; int _mark = p->mark; { // import_name if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> import_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "import_name")); stmt_ty import_name_var; if ( (import_name_var = import_name_rule(p)) // import_name ) { + D(fprintf(stderr, "%*c+ import_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "import_name")); _res = import_name_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s import_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "import_name")); } { // import_from if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> import_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "import_from")); stmt_ty import_from_var; if ( (import_from_var = import_from_rule(p)) // import_from ) { + D(fprintf(stderr, "%*c+ import_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "import_from")); _res = import_from_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s import_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "import_from")); } _res = NULL; done: + D(p->level--); return _res; } @@ -2454,13 +2918,16 @@ import_stmt_rule(Parser *p) static stmt_ty import_name_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } stmt_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -2469,8 +2936,10 @@ import_name_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'import' dotted_as_names if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> import_name[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'import' dotted_as_names")); Token * _keyword; asdl_seq* a; if ( @@ -2479,8 +2948,10 @@ import_name_rule(Parser *p) (a = dotted_as_names_rule(p)) // dotted_as_names ) { + D(fprintf(stderr, "%*c+ import_name[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'import' dotted_as_names")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -2490,14 +2961,18 @@ import_name_rule(Parser *p) _res = _Py_Import ( a , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s import_name[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'import' dotted_as_names")); } _res = NULL; done: + D(p->level--); return _res; } @@ -2507,13 +2982,16 @@ import_name_rule(Parser *p) static stmt_ty import_from_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } stmt_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -2522,8 +3000,10 @@ import_from_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'from' (('.' | '...'))* dotted_name 'import' import_from_targets if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> import_from[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from' (('.' | '...'))* dotted_name 'import' import_from_targets")); Token * _keyword; Token * _keyword_1; asdl_seq * a; @@ -2541,8 +3021,10 @@ import_from_rule(Parser *p) (c = import_from_targets_rule(p)) // import_from_targets ) { + D(fprintf(stderr, "%*c+ import_from[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from' (('.' | '...'))* dotted_name 'import' import_from_targets")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -2552,16 +3034,21 @@ import_from_rule(Parser *p) _res = _Py_ImportFrom ( b -> v . Name . id , c , _PyPegen_seq_count_dots ( a ) , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s import_from[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'from' (('.' | '...'))* dotted_name 'import' import_from_targets")); } { // 'from' (('.' 
| '...'))+ 'import' import_from_targets if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> import_from[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from' (('.' | '...'))+ 'import' import_from_targets")); Token * _keyword; Token * _keyword_1; asdl_seq * a; @@ -2576,8 +3063,10 @@ import_from_rule(Parser *p) (b = import_from_targets_rule(p)) // import_from_targets ) { + D(fprintf(stderr, "%*c+ import_from[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from' (('.' | '...'))+ 'import' import_from_targets")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -2587,14 +3076,18 @@ import_from_rule(Parser *p) _res = _Py_ImportFrom ( NULL , b , _PyPegen_seq_count_dots ( a ) , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s import_from[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'from' (('.' | '...'))+ 'import' import_from_targets")); } _res = NULL; done: + D(p->level--); return _res; } @@ -2606,15 +3099,19 @@ import_from_rule(Parser *p) static asdl_seq* import_from_targets_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq* _res = NULL; int _mark = p->mark; { // '(' import_from_as_names ','? ')' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> import_from_targets[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' import_from_as_names ','? ')'")); Token * _literal; Token * _literal_1; void *_opt_var; @@ -2630,19 +3127,25 @@ import_from_targets_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { + D(fprintf(stderr, "%*c+ import_from_targets[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' import_from_as_names ','? 
')'")); _res = a; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s import_from_targets[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' import_from_as_names ','? ')'")); } { // import_from_as_names !',' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> import_from_targets[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "import_from_as_names !','")); asdl_seq* import_from_as_names_var; if ( (import_from_as_names_var = import_from_as_names_rule(p)) // import_from_as_names @@ -2650,45 +3153,60 @@ import_from_targets_rule(Parser *p) _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 12) // token=',' ) { + D(fprintf(stderr, "%*c+ import_from_targets[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "import_from_as_names !','")); _res = import_from_as_names_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s import_from_targets[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "import_from_as_names !','")); } { // '*' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> import_from_targets[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' ) { + D(fprintf(stderr, "%*c+ import_from_targets[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'")); _res = _PyPegen_singleton_seq ( p , CHECK ( _PyPegen_alias_for_star ( p ) ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s import_from_targets[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'*'")); } { // invalid_import_from_targets if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> import_from_targets[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_import_from_targets")); void *invalid_import_from_targets_var; if ( (invalid_import_from_targets_var = invalid_import_from_targets_rule(p)) // invalid_import_from_targets ) { + D(fprintf(stderr, "%*c+ import_from_targets[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_import_from_targets")); _res = invalid_import_from_targets_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s import_from_targets[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_import_from_targets")); } _res = NULL; done: + D(p->level--); return _res; } @@ -2696,31 +3214,40 @@ import_from_targets_rule(Parser *p) static asdl_seq* import_from_as_names_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq* _res = NULL; int _mark = p->mark; { // ','.import_from_as_name+ if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> import_from_as_names[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.import_from_as_name+")); asdl_seq * a; if ( (a = _gather_32_rule(p)) // ','.import_from_as_name+ ) { + D(fprintf(stderr, "%*c+ import_from_as_names[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.import_from_as_name+")); _res = a; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s import_from_as_names[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','.import_from_as_name+")); } _res = NULL; done: + D(p->level--); return _res; } @@ -2728,15 +3255,19 @@ import_from_as_names_rule(Parser *p) static alias_ty import_from_as_name_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } alias_ty _res = NULL; int _mark = p->mark; { // NAME ['as' NAME] if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> import_from_as_name[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME ['as' NAME]")); expr_ty a; void *b; if ( @@ -2745,17 +3276,22 @@ import_from_as_name_rule(Parser *p) (b = _tmp_34_rule(p), 1) // ['as' NAME] ) { + D(fprintf(stderr, "%*c+ import_from_as_name[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME ['as' NAME]")); _res = _Py_alias ( a -> v . Name . id , ( b ) ? ( ( expr_ty ) b ) -> v . Name . id : NULL , p -> arena ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s import_from_as_name[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NAME ['as' NAME]")); } _res = NULL; done: + D(p->level--); return _res; } @@ -2763,31 +3299,40 @@ import_from_as_name_rule(Parser *p) static asdl_seq* dotted_as_names_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq* _res = NULL; int _mark = p->mark; { // ','.dotted_as_name+ if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> dotted_as_names[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.dotted_as_name+")); asdl_seq * a; if ( (a = _gather_35_rule(p)) // ','.dotted_as_name+ ) { + D(fprintf(stderr, "%*c+ dotted_as_names[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.dotted_as_name+")); _res = a; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s dotted_as_names[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.dotted_as_name+")); } _res = NULL; done: + D(p->level--); return _res; } @@ -2795,15 +3340,19 @@ dotted_as_names_rule(Parser *p) static alias_ty dotted_as_name_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } alias_ty _res = NULL; int _mark = p->mark; { // dotted_name ['as' NAME] if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> dotted_as_name[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dotted_name ['as' NAME]")); expr_ty a; void *b; if ( @@ -2812,17 +3361,22 @@ dotted_as_name_rule(Parser *p) (b = _tmp_37_rule(p), 1) // ['as' NAME] ) { + D(fprintf(stderr, "%*c+ dotted_as_name[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dotted_name ['as' NAME]")); _res = _Py_alias ( a -> v . Name . id , ( b ) ? ( ( expr_ty ) b ) -> v . Name . 
id : NULL , p -> arena ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s dotted_as_name[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dotted_name ['as' NAME]")); } _res = NULL; done: + D(p->level--); return _res; } @@ -2832,14 +3386,18 @@ static expr_ty dotted_name_raw(Parser *); static expr_ty dotted_name_rule(Parser *p) { + D(p->level++); expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, dotted_name_type, &_res)) + if (_PyPegen_is_memoized(p, dotted_name_type, &_res)) { + D(p->level--); return _res; + } int _mark = p->mark; int _resmark = p->mark; while (1) { int tmpvar_0 = _PyPegen_update_memo(p, _mark, dotted_name_type, _res); if (tmpvar_0) { + D(p->level--); return _res; } p->mark = _mark; @@ -2850,20 +3408,25 @@ dotted_name_rule(Parser *p) _res = _raw; } p->mark = _resmark; + D(p->level--); return _res; } static expr_ty dotted_name_raw(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; { // dotted_name '.' NAME if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> dotted_name[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dotted_name '.' NAME")); Token * _literal; expr_ty a; expr_ty b; @@ -2875,31 +3438,41 @@ dotted_name_raw(Parser *p) (b = _PyPegen_name_token(p)) // NAME ) { + D(fprintf(stderr, "%*c+ dotted_name[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dotted_name '.' NAME")); _res = _PyPegen_join_names_with_dot ( p , a , b ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s dotted_name[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dotted_name '.' 
NAME")); } { // NAME if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> dotted_name[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME")); expr_ty name_var; if ( (name_var = _PyPegen_name_token(p)) // NAME ) { + D(fprintf(stderr, "%*c+ dotted_name[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME")); _res = name_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s dotted_name[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME")); } _res = NULL; done: + D(p->level--); return _res; } @@ -2909,13 +3482,16 @@ dotted_name_raw(Parser *p) static stmt_ty if_stmt_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } stmt_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -2924,8 +3500,10 @@ if_stmt_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'if' named_expression ':' block elif_stmt if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> if_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' named_expression ':' block elif_stmt")); Token * _keyword; Token * _literal; expr_ty a; @@ -2943,8 +3521,10 @@ if_stmt_rule(Parser *p) (c = elif_stmt_rule(p)) // elif_stmt ) { + D(fprintf(stderr, "%*c+ if_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' named_expression ':' block elif_stmt")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -2954,16 +3534,21 @@ if_stmt_rule(Parser *p) _res = _Py_If ( a , b , CHECK ( _PyPegen_singleton_seq ( p , c ) ) , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s 
if_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'if' named_expression ':' block elif_stmt")); } { // 'if' named_expression ':' block else_block? if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> if_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' named_expression ':' block else_block?")); Token * _keyword; Token * _literal; expr_ty a; @@ -2981,8 +3566,10 @@ if_stmt_rule(Parser *p) (c = else_block_rule(p), 1) // else_block? ) { + D(fprintf(stderr, "%*c+ if_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' named_expression ':' block else_block?")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -2992,14 +3579,18 @@ if_stmt_rule(Parser *p) _res = _Py_If ( a , b , c , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s if_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'if' named_expression ':' block else_block?")); } _res = NULL; done: + D(p->level--); return _res; } @@ -3009,13 +3600,16 @@ if_stmt_rule(Parser *p) static stmt_ty elif_stmt_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } stmt_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -3024,8 +3618,10 @@ elif_stmt_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'elif' named_expression ':' block elif_stmt if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> elif_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'elif' named_expression ':' block elif_stmt")); Token * _keyword; Token * _literal; expr_ty a; @@ -3043,8 +3639,10 @@ elif_stmt_rule(Parser *p) (c = elif_stmt_rule(p)) // elif_stmt ) { + D(fprintf(stderr, "%*c+ elif_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'elif' named_expression ':' block elif_stmt")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -3054,16 +3652,21 @@ elif_stmt_rule(Parser *p) _res = _Py_If ( a , b , CHECK ( _PyPegen_singleton_seq ( p , c ) ) , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s elif_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'elif' named_expression ':' block elif_stmt")); } { // 'elif' named_expression ':' block else_block? 
if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> elif_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'elif' named_expression ':' block else_block?")); Token * _keyword; Token * _literal; expr_ty a; @@ -3081,8 +3684,10 @@ elif_stmt_rule(Parser *p) (c = else_block_rule(p), 1) // else_block? ) { + D(fprintf(stderr, "%*c+ elif_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'elif' named_expression ':' block else_block?")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -3092,14 +3697,18 @@ elif_stmt_rule(Parser *p) _res = _Py_If ( a , b , c , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s elif_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'elif' named_expression ':' block else_block?")); } _res = NULL; done: + D(p->level--); return _res; } @@ -3107,15 +3716,19 @@ elif_stmt_rule(Parser *p) static asdl_seq* else_block_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq* _res = NULL; int _mark = p->mark; { // 'else' ':' block if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> else_block[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'else' ':' block")); Token * _keyword; Token * _literal; asdl_seq* b; @@ -3127,17 +3740,22 @@ else_block_rule(Parser *p) (b = block_rule(p)) // block ) { + D(fprintf(stderr, "%*c+ else_block[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'else' ':' block")); _res = b; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s else_block[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'else' ':' block")); } _res = NULL; done: + D(p->level--); return _res; } @@ -3145,13 +3763,16 @@ else_block_rule(Parser *p) static stmt_ty while_stmt_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } stmt_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -3160,8 +3781,10 @@ while_stmt_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'while' named_expression ':' block else_block? if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> while_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'while' named_expression ':' block else_block?")); Token * _keyword; Token * _literal; expr_ty a; @@ -3179,8 +3802,10 @@ while_stmt_rule(Parser *p) (c = else_block_rule(p), 1) // else_block? ) { + D(fprintf(stderr, "%*c+ while_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'while' named_expression ':' block else_block?")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -3190,14 +3815,18 @@ while_stmt_rule(Parser *p) _res = _Py_While ( a , b , c , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s while_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'while' named_expression ':' block else_block?")); } _res = NULL; done: + D(p->level--); return _res; } @@ -3207,13 +3836,16 @@ while_stmt_rule(Parser *p) static stmt_ty for_stmt_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } stmt_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -3222,8 +3854,10 @@ for_stmt_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block? if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> for_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block?")); Token * _keyword; Token * _keyword_1; Token * _literal; @@ -3250,8 +3884,10 @@ for_stmt_rule(Parser *p) (el = else_block_rule(p), 1) // else_block? ) { + D(fprintf(stderr, "%*c+ for_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block?")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -3261,16 +3897,21 @@ for_stmt_rule(Parser *p) _res = _Py_For ( t , ex , b , el , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s for_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block?")); } { // ASYNC 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block? 
if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> for_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block?")); Token * _keyword; Token * _keyword_1; Token * _literal; @@ -3300,8 +3941,10 @@ for_stmt_rule(Parser *p) (el = else_block_rule(p), 1) // else_block? ) { + D(fprintf(stderr, "%*c+ for_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block?")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -3311,14 +3954,18 @@ for_stmt_rule(Parser *p) _res = CHECK_VERSION ( 5 , "Async for loops are" , _Py_AsyncFor ( t , ex , b , el , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s for_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block?")); } _res = NULL; done: + D(p->level--); return _res; } @@ -3330,13 +3977,16 @@ for_stmt_rule(Parser *p) static stmt_ty with_stmt_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } stmt_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -3345,8 +3995,10 @@ with_stmt_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'with' '(' ','.with_item+ ','? ')' ':' block if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'with' '(' ','.with_item+ ','? 
')' ':' block")); Token * _keyword; Token * _literal; Token * _literal_1; @@ -3371,8 +4023,10 @@ with_stmt_rule(Parser *p) (b = block_rule(p)) // block ) { + D(fprintf(stderr, "%*c+ with_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'with' '(' ','.with_item+ ','? ')' ':' block")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -3382,16 +4036,21 @@ with_stmt_rule(Parser *p) _res = _Py_With ( a , b , NULL , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s with_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'with' '(' ','.with_item+ ','? ')' ':' block")); } { // 'with' ','.with_item+ ':' TYPE_COMMENT? block if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'with' ','.with_item+ ':' TYPE_COMMENT? block")); Token * _keyword; Token * _literal; asdl_seq * a; @@ -3409,8 +4068,10 @@ with_stmt_rule(Parser *p) (b = block_rule(p)) // block ) { + D(fprintf(stderr, "%*c+ with_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'with' ','.with_item+ ':' TYPE_COMMENT? block")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -3420,16 +4081,21 @@ with_stmt_rule(Parser *p) _res = _Py_With ( a , b , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s with_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'with' ','.with_item+ ':' TYPE_COMMENT? block")); } { // ASYNC 'with' '(' ','.with_item+ ','? 
')' ':' block if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block")); Token * _keyword; Token * _literal; Token * _literal_1; @@ -3457,8 +4123,10 @@ with_stmt_rule(Parser *p) (b = block_rule(p)) // block ) { + D(fprintf(stderr, "%*c+ with_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -3468,16 +4136,21 @@ with_stmt_rule(Parser *p) _res = CHECK_VERSION ( 5 , "Async with statements are" , _Py_AsyncWith ( a , b , NULL , EXTRA ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s with_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block")); } { // ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? block if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? block")); Token * _keyword; Token * _literal; asdl_seq * a; @@ -3498,8 +4171,10 @@ with_stmt_rule(Parser *p) (b = block_rule(p)) // block ) { + D(fprintf(stderr, "%*c+ with_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? 
block")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -3509,14 +4184,18 @@ with_stmt_rule(Parser *p) _res = CHECK_VERSION ( 5 , "Async with statements are" , _Py_AsyncWith ( a , b , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s with_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? block")); } _res = NULL; done: + D(p->level--); return _res; } @@ -3524,15 +4203,19 @@ with_stmt_rule(Parser *p) static withitem_ty with_item_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } withitem_ty _res = NULL; int _mark = p->mark; { // expression ['as' target] if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> with_item[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' target]")); expr_ty e; void *o; if ( @@ -3541,17 +4224,22 @@ with_item_rule(Parser *p) (o = _tmp_46_rule(p), 1) // ['as' target] ) { + D(fprintf(stderr, "%*c+ with_item[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' target]")); _res = _Py_withitem ( e , o , p -> arena ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s with_item[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression ['as' target]")); } _res = NULL; done: + D(p->level--); return _res; } @@ -3561,13 +4249,16 @@ with_item_rule(Parser *p) static stmt_ty try_stmt_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } stmt_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -3576,8 +4267,10 @@ try_stmt_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'try' ':' block finally_block if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> try_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'try' ':' block finally_block")); Token * _keyword; Token * _literal; asdl_seq* b; @@ -3592,8 +4285,10 @@ try_stmt_rule(Parser *p) (f = finally_block_rule(p)) // finally_block ) { + D(fprintf(stderr, "%*c+ try_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'try' ':' block finally_block")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -3603,16 +4298,21 @@ try_stmt_rule(Parser *p) _res = _Py_Try ( b , NULL , NULL , f , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s try_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'try' ':' block finally_block")); } { // 'try' ':' block except_block+ else_block? finally_block? if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> try_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'try' ':' block except_block+ else_block? 
finally_block?")); Token * _keyword; Token * _literal; asdl_seq* b; @@ -3633,8 +4333,10 @@ try_stmt_rule(Parser *p) (f = finally_block_rule(p), 1) // finally_block? ) { + D(fprintf(stderr, "%*c+ try_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'try' ':' block except_block+ else_block? finally_block?")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -3644,14 +4346,18 @@ try_stmt_rule(Parser *p) _res = _Py_Try ( b , ex , el , f , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s try_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'try' ':' block except_block+ else_block? finally_block?")); } _res = NULL; done: + D(p->level--); return _res; } @@ -3659,13 +4365,16 @@ try_stmt_rule(Parser *p) static excepthandler_ty except_block_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } excepthandler_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -3674,8 +4383,10 @@ except_block_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'except' expression ['as' NAME] ':' block if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> except_block[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except' expression ['as' NAME] ':' block")); Token * _keyword; Token * _literal; asdl_seq* b; @@ -3693,8 +4404,10 @@ except_block_rule(Parser *p) (b = block_rule(p)) // block ) { + D(fprintf(stderr, "%*c+ except_block[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except' expression ['as' NAME] ':' block")); Token *_token = 
_PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -3704,16 +4417,21 @@ except_block_rule(Parser *p) _res = _Py_ExceptHandler ( e , ( t ) ? ( ( expr_ty ) t ) -> v . Name . id : NULL , b , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s except_block[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'except' expression ['as' NAME] ':' block")); } { // 'except' ':' block if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> except_block[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except' ':' block")); Token * _keyword; Token * _literal; asdl_seq* b; @@ -3725,8 +4443,10 @@ except_block_rule(Parser *p) (b = block_rule(p)) // block ) { + D(fprintf(stderr, "%*c+ except_block[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except' ':' block")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -3736,14 +4456,18 @@ except_block_rule(Parser *p) _res = _Py_ExceptHandler ( NULL , NULL , b , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s except_block[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'except' ':' block")); } _res = NULL; done: + D(p->level--); return _res; } @@ -3751,15 +4475,19 @@ except_block_rule(Parser *p) static asdl_seq* finally_block_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq* _res = NULL; int _mark = p->mark; { // 'finally' ':' block if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> finally_block[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'finally' ':' block")); Token * _keyword; Token * _literal; asdl_seq* a; @@ -3771,17 +4499,22 @@ finally_block_rule(Parser *p) (a = block_rule(p)) // block ) { + D(fprintf(stderr, "%*c+ finally_block[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'finally' ':' block")); _res = a; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s finally_block[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'finally' ':' block")); } _res = NULL; done: + D(p->level--); return _res; } @@ -3789,13 +4522,16 @@ finally_block_rule(Parser *p) static stmt_ty return_stmt_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } stmt_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -3804,8 +4540,10 @@ return_stmt_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'return' star_expressions? if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> return_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'return' star_expressions?")); Token * _keyword; void *a; if ( @@ -3814,8 +4552,10 @@ return_stmt_rule(Parser *p) (a = star_expressions_rule(p), 1) // star_expressions? 
) { + D(fprintf(stderr, "%*c+ return_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'return' star_expressions?")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -3825,14 +4565,18 @@ return_stmt_rule(Parser *p) _res = _Py_Return ( a , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s return_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'return' star_expressions?")); } _res = NULL; done: + D(p->level--); return _res; } @@ -3840,13 +4584,16 @@ return_stmt_rule(Parser *p) static stmt_ty raise_stmt_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } stmt_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -3855,8 +4602,10 @@ raise_stmt_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'raise' expression ['from' expression] if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> raise_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'raise' expression ['from' expression]")); Token * _keyword; expr_ty a; void *b; @@ -3868,8 +4617,10 @@ raise_stmt_rule(Parser *p) (b = _tmp_49_rule(p), 1) // ['from' expression] ) { + D(fprintf(stderr, "%*c+ raise_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'raise' expression ['from' expression]")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -3879,23 +4630,30 @@ raise_stmt_rule(Parser *p) _res = _Py_Raise ( a , b , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + 
D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s raise_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'raise' expression ['from' expression]")); } { // 'raise' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> raise_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'raise'")); Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 501)) // token='raise' ) { + D(fprintf(stderr, "%*c+ raise_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'raise'")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -3905,14 +4663,18 @@ raise_stmt_rule(Parser *p) _res = _Py_Raise ( NULL , NULL , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s raise_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'raise'")); } _res = NULL; done: + D(p->level--); return _res; } @@ -3920,15 +4682,19 @@ raise_stmt_rule(Parser *p) static stmt_ty function_def_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } stmt_ty _res = NULL; int _mark = p->mark; { // decorators function_def_raw if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> function_def[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "decorators function_def_raw")); asdl_seq* d; stmt_ty f; if ( @@ -3937,31 +4703,41 @@ function_def_rule(Parser *p) (f = function_def_raw_rule(p)) // function_def_raw ) { + D(fprintf(stderr, "%*c+ function_def[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "decorators function_def_raw")); _res = _PyPegen_function_def_decorators ( p , d , f ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s function_def[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "decorators function_def_raw")); } { // function_def_raw if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> function_def[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "function_def_raw")); stmt_ty function_def_raw_var; if ( (function_def_raw_var = function_def_raw_rule(p)) // function_def_raw ) { + D(fprintf(stderr, "%*c+ function_def[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "function_def_raw")); _res = function_def_raw_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s function_def[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "function_def_raw")); } _res = NULL; done: + D(p->level--); return _res; } @@ -3971,13 +4747,16 @@ function_def_rule(Parser *p) static stmt_ty function_def_raw_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } stmt_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -3986,8 +4765,10 @@ function_def_raw_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? block if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> function_def_raw[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? block")); Token * _keyword; Token * _literal; Token * _literal_1; @@ -4017,8 +4798,10 @@ function_def_raw_rule(Parser *p) (b = block_rule(p)) // block ) { + D(fprintf(stderr, "%*c+ function_def_raw[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? block")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -4028,16 +4811,21 @@ function_def_raw_rule(Parser *p) _res = _Py_FunctionDef ( n -> v . Name . id , ( params ) ? params : CHECK ( _PyPegen_empty_arguments ( p ) ) , b , NULL , a , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s function_def_raw[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? block")); } { // ASYNC 'def' NAME '(' params? 
')' ['->' expression] ':' func_type_comment? block if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> function_def_raw[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC 'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? block")); Token * _keyword; Token * _literal; Token * _literal_1; @@ -4070,8 +4858,10 @@ function_def_raw_rule(Parser *p) (b = block_rule(p)) // block ) { + D(fprintf(stderr, "%*c+ function_def_raw[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? block")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -4081,14 +4871,18 @@ function_def_raw_rule(Parser *p) _res = CHECK_VERSION ( 5 , "Async functions are" , _Py_AsyncFunctionDef ( n -> v . Name . id , ( params ) ? params : CHECK ( _PyPegen_empty_arguments ( p ) ) , b , NULL , a , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s function_def_raw[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC 'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? 
block")); } _res = NULL; done: + D(p->level--); return _res; } @@ -4099,15 +4893,19 @@ function_def_raw_rule(Parser *p) static Token* func_type_comment_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } Token* _res = NULL; int _mark = p->mark; { // NEWLINE TYPE_COMMENT &(NEWLINE INDENT) if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> func_type_comment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE TYPE_COMMENT &(NEWLINE INDENT)")); Token * newline_var; Token * t; if ( @@ -4118,45 +4916,60 @@ func_type_comment_rule(Parser *p) _PyPegen_lookahead(1, _tmp_52_rule, p) ) { + D(fprintf(stderr, "%*c+ func_type_comment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE TYPE_COMMENT &(NEWLINE INDENT)")); _res = t; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s func_type_comment[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NEWLINE TYPE_COMMENT &(NEWLINE INDENT)")); } { // invalid_double_type_comments if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> func_type_comment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_double_type_comments")); void *invalid_double_type_comments_var; if ( (invalid_double_type_comments_var = invalid_double_type_comments_rule(p)) // invalid_double_type_comments ) { + D(fprintf(stderr, "%*c+ func_type_comment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_double_type_comments")); _res = invalid_double_type_comments_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s func_type_comment[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "invalid_double_type_comments")); } { // TYPE_COMMENT if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> func_type_comment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "TYPE_COMMENT")); Token * type_comment_var; if ( (type_comment_var = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' ) { + D(fprintf(stderr, "%*c+ func_type_comment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "TYPE_COMMENT")); _res = type_comment_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s func_type_comment[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "TYPE_COMMENT")); } _res = NULL; done: + D(p->level--); return _res; } @@ -4164,41 +4977,54 @@ func_type_comment_rule(Parser *p) static arguments_ty params_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } arguments_ty _res = NULL; int _mark = p->mark; { // invalid_parameters if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> params[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_parameters")); void *invalid_parameters_var; if ( (invalid_parameters_var = invalid_parameters_rule(p)) // invalid_parameters ) { + D(fprintf(stderr, "%*c+ params[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_parameters")); _res = invalid_parameters_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s params[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "invalid_parameters")); } { // parameters if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> params[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "parameters")); arguments_ty parameters_var; if ( (parameters_var = parameters_rule(p)) // parameters ) { + D(fprintf(stderr, "%*c+ params[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "parameters")); _res = parameters_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s params[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "parameters")); } _res = NULL; done: + D(p->level--); return _res; } @@ -4211,15 +5037,19 @@ params_rule(Parser *p) static arguments_ty parameters_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } arguments_ty _res = NULL; int _mark = p->mark; { // slash_no_default param_no_default* param_with_default* star_etc? if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default param_no_default* param_with_default* star_etc?")); asdl_seq* a; asdl_seq * b; asdl_seq * c; @@ -4234,19 +5064,25 @@ parameters_rule(Parser *p) (d = star_etc_rule(p), 1) // star_etc? ) { + D(fprintf(stderr, "%*c+ parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_no_default param_no_default* param_with_default* star_etc?")); _res = _PyPegen_make_arguments ( p , a , NULL , b , c , d ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s parameters[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slash_no_default param_no_default* param_with_default* star_etc?")); } { // slash_with_default param_with_default* star_etc? 
if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default param_with_default* star_etc?")); SlashWithDefault* a; asdl_seq * b; void *c; @@ -4258,19 +5094,25 @@ parameters_rule(Parser *p) (c = star_etc_rule(p), 1) // star_etc? ) { + D(fprintf(stderr, "%*c+ parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default param_with_default* star_etc?")); _res = _PyPegen_make_arguments ( p , NULL , a , NULL , b , c ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s parameters[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slash_with_default param_with_default* star_etc?")); } { // param_no_default+ param_with_default* star_etc? if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default+ param_with_default* star_etc?")); asdl_seq * a; asdl_seq * b; void *c; @@ -4282,19 +5124,25 @@ parameters_rule(Parser *p) (c = star_etc_rule(p), 1) // star_etc? ) { + D(fprintf(stderr, "%*c+ parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default+ param_with_default* star_etc?")); _res = _PyPegen_make_arguments ( p , NULL , NULL , a , b , c ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s parameters[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default+ param_with_default* star_etc?")); } { // param_with_default+ star_etc? 
if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default+ star_etc?")); asdl_seq * a; void *b; if ( @@ -4303,35 +5151,46 @@ parameters_rule(Parser *p) (b = star_etc_rule(p), 1) // star_etc? ) { + D(fprintf(stderr, "%*c+ parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_with_default+ star_etc?")); _res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , a , b ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s parameters[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default+ star_etc?")); } { // star_etc if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_etc")); StarEtc* a; if ( (a = star_etc_rule(p)) // star_etc ) { + D(fprintf(stderr, "%*c+ parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_etc")); _res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , NULL , a ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s parameters[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_etc")); } _res = NULL; done: + D(p->level--); return _res; } @@ -4339,15 +5198,19 @@ parameters_rule(Parser *p) static asdl_seq* slash_no_default_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq* _res = NULL; int _mark = p->mark; { // param_no_default+ '/' ',' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> slash_no_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default+ '/' ','")); Token * _literal; Token * _literal_1; asdl_seq * a; @@ -4359,19 +5222,25 @@ slash_no_default_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' ) { + D(fprintf(stderr, "%*c+ slash_no_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default+ '/' ','")); _res = a; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s slash_no_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default+ '/' ','")); } { // param_no_default+ '/' &')' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> slash_no_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default+ '/' &')'")); Token * _literal; asdl_seq * a; if ( @@ -4382,17 +5251,22 @@ slash_no_default_rule(Parser *p) _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { + D(fprintf(stderr, "%*c+ slash_no_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default+ '/' &')'")); _res = a; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s slash_no_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default+ '/' &')'")); } _res = NULL; done: + D(p->level--); return _res; } @@ -4402,15 +5276,19 @@ slash_no_default_rule(Parser *p) static SlashWithDefault* slash_with_default_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } SlashWithDefault* _res = NULL; int _mark = p->mark; { // param_no_default* param_with_default+ '/' ',' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> slash_with_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default* param_with_default+ '/' ','")); Token * _literal; Token * _literal_1; asdl_seq * a; @@ -4425,19 +5303,25 @@ slash_with_default_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' ) { + D(fprintf(stderr, "%*c+ slash_with_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default* param_with_default+ '/' ','")); _res = _PyPegen_slash_with_default ( p , a , b ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s slash_with_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default* param_with_default+ '/' ','")); } { // param_no_default* param_with_default+ '/' &')' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> slash_with_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default* param_with_default+ '/' &')'")); Token * _literal; asdl_seq * a; asdl_seq * b; @@ -4451,17 +5335,22 @@ slash_with_default_rule(Parser *p) _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { + D(fprintf(stderr, "%*c+ slash_with_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default* param_with_default+ '/' &')'")); _res = _PyPegen_slash_with_default ( p , a , b ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s slash_with_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default* param_with_default+ '/' &')'")); } _res = NULL; done: + D(p->level--); return _res; } @@ -4473,15 +5362,19 @@ slash_with_default_rule(Parser *p) static StarEtc* star_etc_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } StarEtc* _res = NULL; int _mark = p->mark; { // '*' param_no_default param_maybe_default* kwds? if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' param_no_default param_maybe_default* kwds?")); Token * _literal; arg_ty a; asdl_seq * b; @@ -4496,19 +5389,25 @@ star_etc_rule(Parser *p) (c = kwds_rule(p), 1) // kwds? 
) { + D(fprintf(stderr, "%*c+ star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' param_no_default param_maybe_default* kwds?")); _res = _PyPegen_star_etc ( p , a , b , c ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s star_etc[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' param_no_default param_maybe_default* kwds?")); } { // '*' ',' param_maybe_default+ kwds? if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' ',' param_maybe_default+ kwds?")); Token * _literal; Token * _literal_1; asdl_seq * b; @@ -4523,49 +5422,65 @@ star_etc_rule(Parser *p) (c = kwds_rule(p), 1) // kwds? ) { + D(fprintf(stderr, "%*c+ star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' ',' param_maybe_default+ kwds?")); _res = _PyPegen_star_etc ( p , NULL , b , c ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s star_etc[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' ',' param_maybe_default+ kwds?")); } { // kwds if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwds")); arg_ty a; if ( (a = kwds_rule(p)) // kwds ) { + D(fprintf(stderr, "%*c+ star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwds")); _res = _PyPegen_star_etc ( p , NULL , NULL , a ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s star_etc[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "kwds")); } { // invalid_star_etc if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_star_etc")); void *invalid_star_etc_var; if ( (invalid_star_etc_var = invalid_star_etc_rule(p)) // invalid_star_etc ) { + D(fprintf(stderr, "%*c+ star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_star_etc")); _res = invalid_star_etc_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s star_etc[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_star_etc")); } _res = NULL; done: + D(p->level--); return _res; } @@ -4573,15 +5488,19 @@ star_etc_rule(Parser *p) static arg_ty kwds_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } arg_ty _res = NULL; int _mark = p->mark; { // '**' param_no_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> kwds[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**' param_no_default")); Token * _literal; arg_ty a; if ( @@ -4590,17 +5509,22 @@ kwds_rule(Parser *p) (a = param_no_default_rule(p)) // param_no_default ) { + D(fprintf(stderr, "%*c+ kwds[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**' param_no_default")); _res = a; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s kwds[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**' param_no_default")); } _res = NULL; done: + D(p->level--); return _res; } @@ -4608,15 +5532,19 @@ kwds_rule(Parser *p) static arg_ty param_no_default_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } arg_ty _res = NULL; int _mark = p->mark; { // param ',' TYPE_COMMENT? 
if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> param_no_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param ',' TYPE_COMMENT?")); Token * _literal; arg_ty a; void *tc; @@ -4628,19 +5556,25 @@ param_no_default_rule(Parser *p) (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? ) { + D(fprintf(stderr, "%*c+ param_no_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param ',' TYPE_COMMENT?")); _res = _PyPegen_add_type_comment_to_arg ( p , a , tc ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s param_no_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param ',' TYPE_COMMENT?")); } { // param TYPE_COMMENT? &')' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> param_no_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param TYPE_COMMENT? &')'")); arg_ty a; void *tc; if ( @@ -4651,17 +5585,22 @@ param_no_default_rule(Parser *p) _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { + D(fprintf(stderr, "%*c+ param_no_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param TYPE_COMMENT? &')'")); _res = _PyPegen_add_type_comment_to_arg ( p , a , tc ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s param_no_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param TYPE_COMMENT? &')'")); } _res = NULL; done: + D(p->level--); return _res; } @@ -4669,15 +5608,19 @@ param_no_default_rule(Parser *p) static NameDefaultPair* param_with_default_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } NameDefaultPair* _res = NULL; int _mark = p->mark; { // param default ',' TYPE_COMMENT? 
if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> param_with_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param default ',' TYPE_COMMENT?")); Token * _literal; arg_ty a; expr_ty c; @@ -4692,19 +5635,25 @@ param_with_default_rule(Parser *p) (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? ) { + D(fprintf(stderr, "%*c+ param_with_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param default ',' TYPE_COMMENT?")); _res = _PyPegen_name_default_pair ( p , a , c , tc ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s param_with_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param default ',' TYPE_COMMENT?")); } { // param default TYPE_COMMENT? &')' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> param_with_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param default TYPE_COMMENT? &')'")); arg_ty a; expr_ty c; void *tc; @@ -4718,17 +5667,22 @@ param_with_default_rule(Parser *p) _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { + D(fprintf(stderr, "%*c+ param_with_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param default TYPE_COMMENT? &')'")); _res = _PyPegen_name_default_pair ( p , a , c , tc ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s param_with_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param default TYPE_COMMENT? 
&')'")); } _res = NULL; done: + D(p->level--); return _res; } @@ -4738,15 +5692,19 @@ param_with_default_rule(Parser *p) static NameDefaultPair* param_maybe_default_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } NameDefaultPair* _res = NULL; int _mark = p->mark; { // param default? ',' TYPE_COMMENT? if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> param_maybe_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param default? ',' TYPE_COMMENT?")); Token * _literal; arg_ty a; void *c; @@ -4761,19 +5719,25 @@ param_maybe_default_rule(Parser *p) (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? ) { + D(fprintf(stderr, "%*c+ param_maybe_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param default? ',' TYPE_COMMENT?")); _res = _PyPegen_name_default_pair ( p , a , c , tc ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s param_maybe_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param default? ',' TYPE_COMMENT?")); } { // param default? TYPE_COMMENT? &')' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> param_maybe_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param default? TYPE_COMMENT? &')'")); arg_ty a; void *c; void *tc; @@ -4787,17 +5751,22 @@ param_maybe_default_rule(Parser *p) _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { + D(fprintf(stderr, "%*c+ param_maybe_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param default? TYPE_COMMENT? 
&')'")); _res = _PyPegen_name_default_pair ( p , a , c , tc ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s param_maybe_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param default? TYPE_COMMENT? &')'")); } _res = NULL; done: + D(p->level--); return _res; } @@ -4805,13 +5774,16 @@ param_maybe_default_rule(Parser *p) static arg_ty param_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } arg_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -4820,8 +5792,10 @@ param_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME annotation? if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> param[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME annotation?")); expr_ty a; void *b; if ( @@ -4830,8 +5804,10 @@ param_rule(Parser *p) (b = annotation_rule(p), 1) // annotation? ) { + D(fprintf(stderr, "%*c+ param[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME annotation?")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -4841,14 +5817,18 @@ param_rule(Parser *p) _res = _Py_arg ( a -> v . Name . id , b , NULL , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s param[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NAME annotation?")); } _res = NULL; done: + D(p->level--); return _res; } @@ -4856,15 +5836,19 @@ param_rule(Parser *p) static expr_ty annotation_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; { // ':' expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> annotation[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':' expression")); Token * _literal; expr_ty a; if ( @@ -4873,17 +5857,22 @@ annotation_rule(Parser *p) (a = expression_rule(p)) // expression ) { + D(fprintf(stderr, "%*c+ annotation[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':' expression")); _res = a; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s annotation[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':' expression")); } _res = NULL; done: + D(p->level--); return _res; } @@ -4891,15 +5880,19 @@ annotation_rule(Parser *p) static expr_ty default_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; { // '=' expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'=' expression")); Token * _literal; expr_ty a; if ( @@ -4908,17 +5901,22 @@ default_rule(Parser *p) (a = expression_rule(p)) // expression ) { + D(fprintf(stderr, "%*c+ default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' expression")); _res = a; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'=' expression")); } _res = NULL; done: + D(p->level--); return _res; } @@ -4926,31 +5924,40 @@ default_rule(Parser *p) static asdl_seq* decorators_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq* _res = NULL; int _mark = p->mark; { // (('@' named_expression NEWLINE))+ if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> decorators[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(('@' named_expression NEWLINE))+")); asdl_seq * a; if ( (a = _loop1_67_rule(p)) // (('@' named_expression NEWLINE))+ ) { + D(fprintf(stderr, "%*c+ decorators[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(('@' named_expression NEWLINE))+")); _res = a; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s decorators[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(('@' named_expression NEWLINE))+")); } _res = NULL; done: + D(p->level--); return _res; } @@ -4958,15 +5965,19 @@ decorators_rule(Parser *p) static stmt_ty class_def_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } stmt_ty _res = NULL; int _mark = p->mark; { // decorators class_def_raw if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> class_def[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "decorators class_def_raw")); asdl_seq* a; stmt_ty b; if ( @@ -4975,31 +5986,41 @@ class_def_rule(Parser *p) (b = class_def_raw_rule(p)) // class_def_raw ) { + D(fprintf(stderr, "%*c+ class_def[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "decorators class_def_raw")); _res = _PyPegen_class_def_decorators ( p , a , b ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s class_def[%d-%d]: %s 
failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "decorators class_def_raw")); } { // class_def_raw if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> class_def[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "class_def_raw")); stmt_ty class_def_raw_var; if ( (class_def_raw_var = class_def_raw_rule(p)) // class_def_raw ) { + D(fprintf(stderr, "%*c+ class_def[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "class_def_raw")); _res = class_def_raw_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s class_def[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "class_def_raw")); } _res = NULL; done: + D(p->level--); return _res; } @@ -5007,13 +6028,16 @@ class_def_rule(Parser *p) static stmt_ty class_def_raw_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } stmt_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -5022,8 +6046,10 @@ class_def_raw_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'class' NAME ['(' arguments? ')'] ':' block if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> class_def_raw[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'class' NAME ['(' arguments? ')'] ':' block")); Token * _keyword; Token * _literal; expr_ty a; @@ -5041,8 +6067,10 @@ class_def_raw_rule(Parser *p) (c = block_rule(p)) // block ) { + D(fprintf(stderr, "%*c+ class_def_raw[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'class' NAME ['(' arguments? ')'] ':' block")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -5052,14 +6080,18 @@ class_def_raw_rule(Parser *p) _res = _Py_ClassDef ( a -> v . 
Name . id , ( b ) ? ( ( expr_ty ) b ) -> v . Call . args : NULL , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , c , NULL , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s class_def_raw[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'class' NAME ['(' arguments? ')'] ':' block")); } _res = NULL; done: + D(p->level--); return _res; } @@ -5067,17 +6099,23 @@ class_def_raw_rule(Parser *p) static asdl_seq* block_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq* _res = NULL; - if (_PyPegen_is_memoized(p, block_type, &_res)) + if (_PyPegen_is_memoized(p, block_type, &_res)) { + D(p->level--); return _res; + } int _mark = p->mark; { // NEWLINE INDENT statements DEDENT if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> block[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT statements DEDENT")); asdl_seq* a; Token * dedent_var; Token * indent_var; @@ -5092,46 +6130,61 @@ block_rule(Parser *p) (dedent_var = _PyPegen_expect_token(p, DEDENT)) // token='DEDENT' ) { + D(fprintf(stderr, "%*c+ block[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT statements DEDENT")); _res = a; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s block[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NEWLINE INDENT statements DEDENT")); } { // simple_stmt if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> block[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "simple_stmt")); asdl_seq* simple_stmt_var; if ( (simple_stmt_var = simple_stmt_rule(p)) // simple_stmt ) { + D(fprintf(stderr, "%*c+ block[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "simple_stmt")); _res = simple_stmt_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s block[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "simple_stmt")); } { // invalid_block if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> block[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_block")); void *invalid_block_var; if ( (invalid_block_var = invalid_block_rule(p)) // invalid_block ) { + D(fprintf(stderr, "%*c+ block[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_block")); _res = invalid_block_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s block[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_block")); } _res = NULL; done: _PyPegen_insert_memo(p, _mark, block_type, _res); + D(p->level--); return _res; } @@ -5139,15 +6192,19 @@ block_rule(Parser *p) static asdl_seq* expressions_list_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq* _res = NULL; int _mark = p->mark; { // ','.star_expression+ ','? if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> expressions_list[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.star_expression+ ','?")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; @@ -5157,17 +6214,22 @@ expressions_list_rule(Parser *p) (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) { + D(fprintf(stderr, "%*c+ expressions_list[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.star_expression+ ','?")); _res = a; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s expressions_list[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.star_expression+ ','?")); } _res = NULL; done: + D(p->level--); return _res; } @@ -5178,13 +6240,16 @@ expressions_list_rule(Parser *p) static expr_ty star_expressions_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -5193,8 +6258,10 @@ star_expressions_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // star_expression ((',' star_expression))+ ','? if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> star_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expression ((',' star_expression))+ ','?")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty a; @@ -5207,8 +6274,10 @@ star_expressions_rule(Parser *p) (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) { + D(fprintf(stderr, "%*c+ star_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expression ((',' star_expression))+ ','?")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -5218,16 +6287,21 @@ star_expressions_rule(Parser *p) _res = _Py_Tuple ( CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , Load , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s star_expressions[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expression ((',' star_expression))+ ','?")); } { // star_expression ',' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> star_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expression ','")); Token * _literal; expr_ty a; if ( @@ -5236,8 +6310,10 @@ star_expressions_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { + D(fprintf(stderr, "%*c+ star_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expression ','")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -5247,28 +6323,37 @@ star_expressions_rule(Parser *p) _res = _Py_Tuple ( CHECK ( _PyPegen_singleton_seq ( p , a ) ) , Load , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s star_expressions[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_expression ','")); } { // star_expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> star_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expression")); expr_ty star_expression_var; if ( (star_expression_var = star_expression_rule(p)) // star_expression ) { + D(fprintf(stderr, "%*c+ star_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expression")); _res = star_expression_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s star_expressions[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expression")); } _res = NULL; done: + D(p->level--); return _res; } @@ -5276,15 +6361,20 @@ star_expressions_rule(Parser *p) static expr_ty star_expression_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, star_expression_type, &_res)) + if (_PyPegen_is_memoized(p, star_expression_type, &_res)) { + D(p->level--); return _res; + } int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -5293,8 +6383,10 @@ star_expression_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // '*' bitwise_or if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> star_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' bitwise_or")); Token * _literal; expr_ty a; if ( @@ -5303,8 +6395,10 @@ star_expression_rule(Parser *p) (a = bitwise_or_rule(p)) // bitwise_or ) { + D(fprintf(stderr, "%*c+ star_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' bitwise_or")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -5314,29 +6408,38 @@ 
star_expression_rule(Parser *p) _res = _Py_Starred ( a , Load , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s star_expression[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' bitwise_or")); } { // expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> star_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression")); expr_ty expression_var; if ( (expression_var = expression_rule(p)) // expression ) { + D(fprintf(stderr, "%*c+ star_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression")); _res = expression_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s star_expression[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression")); } _res = NULL; done: _PyPegen_insert_memo(p, _mark, star_expression_type, _res); + D(p->level--); return _res; } @@ -5344,15 +6447,19 @@ star_expression_rule(Parser *p) static asdl_seq* star_named_expressions_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq* _res = NULL; int _mark = p->mark; { // ','.star_named_expression+ ','? if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> star_named_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.star_named_expression+ ','?")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; @@ -5362,17 +6469,22 @@ star_named_expressions_rule(Parser *p) (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) { + D(fprintf(stderr, "%*c+ star_named_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.star_named_expression+ ','?")); _res = a; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s star_named_expressions[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.star_named_expression+ ','?")); } _res = NULL; done: + D(p->level--); return _res; } @@ -5380,13 +6492,16 @@ star_named_expressions_rule(Parser *p) static expr_ty star_named_expression_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -5395,8 +6510,10 @@ star_named_expression_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // '*' bitwise_or if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> star_named_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' bitwise_or")); Token * _literal; expr_ty a; if ( @@ -5405,8 +6522,10 @@ star_named_expression_rule(Parser *p) (a = bitwise_or_rule(p)) // bitwise_or ) { + D(fprintf(stderr, "%*c+ star_named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' bitwise_or")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -5416,28 +6535,37 @@ star_named_expression_rule(Parser *p) _res = _Py_Starred ( a , Load , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s star_named_expression[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'*' bitwise_or")); } { // named_expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> star_named_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "named_expression")); expr_ty named_expression_var; if ( (named_expression_var = named_expression_rule(p)) // named_expression ) { + D(fprintf(stderr, "%*c+ star_named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "named_expression")); _res = named_expression_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s star_named_expression[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "named_expression")); } _res = NULL; done: + D(p->level--); return _res; } @@ -5445,13 +6573,16 @@ star_named_expression_rule(Parser *p) static expr_ty named_expression_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -5460,8 +6591,10 @@ named_expression_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME ':=' expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> named_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME ':=' expression")); Token * _literal; expr_ty a; expr_ty b; @@ -5473,8 +6606,10 @@ named_expression_rule(Parser *p) (b = expression_rule(p)) // expression ) { + D(fprintf(stderr, "%*c+ named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME ':=' expression")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -5484,16 +6619,21 @@ named_expression_rule(Parser *p) _res = _Py_NamedExpr ( CHECK ( _PyPegen_set_expr_context ( p , a , Store ) ) 
, b , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s named_expression[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME ':=' expression")); } { // expression !':=' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> named_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression !':='")); expr_ty expression_var; if ( (expression_var = expression_rule(p)) // expression @@ -5501,27 +6641,36 @@ named_expression_rule(Parser *p) _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 53) // token=':=' ) { + D(fprintf(stderr, "%*c+ named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression !':='")); _res = expression_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s named_expression[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression !':='")); } { // invalid_named_expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> named_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_named_expression")); void *invalid_named_expression_var; if ( (invalid_named_expression_var = invalid_named_expression_rule(p)) // invalid_named_expression ) { + D(fprintf(stderr, "%*c+ named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_named_expression")); _res = invalid_named_expression_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s named_expression[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "invalid_named_expression")); } _res = NULL; done: + D(p->level--); return _res; } @@ -5529,41 +6678,54 @@ named_expression_rule(Parser *p) static expr_ty annotated_rhs_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; { // yield_expr if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> annotated_rhs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { + D(fprintf(stderr, "%*c+ annotated_rhs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); _res = yield_expr_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s annotated_rhs[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr")); } { // star_expressions if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> annotated_rhs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); expr_ty star_expressions_var; if ( (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { + D(fprintf(stderr, "%*c+ annotated_rhs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); _res = star_expressions_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s annotated_rhs[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_expressions")); } _res = NULL; done: + D(p->level--); return _res; } @@ -5571,13 +6733,16 @@ annotated_rhs_rule(Parser *p) static expr_ty expressions_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -5586,8 +6751,10 @@ expressions_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // expression ((',' expression))+ ','? if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ((',' expression))+ ','?")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty a; @@ -5600,8 +6767,10 @@ expressions_rule(Parser *p) (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) { + D(fprintf(stderr, "%*c+ expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ((',' expression))+ ','?")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -5611,16 +6780,21 @@ expressions_rule(Parser *p) _res = _Py_Tuple ( CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , Load , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s expressions[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression ((',' expression))+ ','?")); } { // expression ',' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ','")); Token * _literal; expr_ty a; if ( @@ -5629,8 +6803,10 @@ expressions_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { + D(fprintf(stderr, "%*c+ expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ','")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -5640,28 +6816,37 @@ expressions_rule(Parser *p) _res = _Py_Tuple ( CHECK ( _PyPegen_singleton_seq ( p , a ) ) , Load , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s expressions[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression ','")); } { // expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression")); expr_ty expression_var; if ( (expression_var = expression_rule(p)) // expression ) { + D(fprintf(stderr, "%*c+ expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression")); _res = expression_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s expressions[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression")); } _res = NULL; done: + D(p->level--); return _res; } @@ -5669,15 +6854,20 @@ expressions_rule(Parser *p) static expr_ty expression_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, expression_type, &_res)) + if (_PyPegen_is_memoized(p, expression_type, &_res)) { + D(p->level--); return _res; + } int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -5686,8 +6876,10 @@ expression_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // disjunction 'if' disjunction 'else' expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "disjunction 'if' disjunction 'else' expression")); Token * _keyword; Token * _keyword_1; expr_ty a; @@ -5705,8 +6897,10 @@ expression_rule(Parser *p) (c = expression_rule(p)) // expression ) { + D(fprintf(stderr, "%*c+ expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "disjunction 'if' disjunction 'else' expression")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -5716,43 +6910,57 @@ expression_rule(Parser *p) _res = _Py_IfExp ( b , a , c , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s expression[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "disjunction 'if' disjunction 'else' expression")); } { // disjunction if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "disjunction")); expr_ty disjunction_var; if ( (disjunction_var = disjunction_rule(p)) // disjunction ) { + D(fprintf(stderr, "%*c+ expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "disjunction")); _res = disjunction_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s expression[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "disjunction")); } { // lambdef if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambdef")); expr_ty lambdef_var; if ( (lambdef_var = lambdef_rule(p)) // lambdef ) { + D(fprintf(stderr, "%*c+ expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambdef")); _res = lambdef_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s expression[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambdef")); } _res = NULL; done: _PyPegen_insert_memo(p, _mark, expression_type, _res); + D(p->level--); return _res; } @@ -5760,13 +6968,16 @@ expression_rule(Parser *p) static expr_ty lambdef_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -5775,8 +6986,10 @@ lambdef_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'lambda' lambda_parameters? ':' expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> lambdef[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'lambda' lambda_parameters? 
':' expression")); Token * _keyword; Token * _literal; void *a; @@ -5791,8 +7004,10 @@ lambdef_rule(Parser *p) (b = expression_rule(p)) // expression ) { + D(fprintf(stderr, "%*c+ lambdef[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'lambda' lambda_parameters? ':' expression")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -5802,14 +7017,18 @@ lambdef_rule(Parser *p) _res = _Py_Lambda ( ( a ) ? a : CHECK ( _PyPegen_empty_arguments ( p ) ) , b , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s lambdef[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'lambda' lambda_parameters? ':' expression")); } _res = NULL; done: + D(p->level--); return _res; } @@ -5822,15 +7041,19 @@ lambdef_rule(Parser *p) static arguments_ty lambda_parameters_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } arguments_ty _res = NULL; int _mark = p->mark; { // lambda_slash_no_default lambda_param_no_default* lambda_param_with_default* lambda_star_etc? if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default lambda_param_no_default* lambda_param_with_default* lambda_star_etc?")); asdl_seq* a; asdl_seq * b; asdl_seq * c; @@ -5845,19 +7068,25 @@ lambda_parameters_rule(Parser *p) (d = lambda_star_etc_rule(p), 1) // lambda_star_etc? 
) { + D(fprintf(stderr, "%*c+ lambda_parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default lambda_param_no_default* lambda_param_with_default* lambda_star_etc?")); _res = _PyPegen_make_arguments ( p , a , NULL , b , c , d ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_parameters[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_slash_no_default lambda_param_no_default* lambda_param_with_default* lambda_star_etc?")); } { // lambda_slash_with_default lambda_param_with_default* lambda_star_etc? if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default lambda_param_with_default* lambda_star_etc?")); SlashWithDefault* a; asdl_seq * b; void *c; @@ -5869,19 +7098,25 @@ lambda_parameters_rule(Parser *p) (c = lambda_star_etc_rule(p), 1) // lambda_star_etc? ) { + D(fprintf(stderr, "%*c+ lambda_parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default lambda_param_with_default* lambda_star_etc?")); _res = _PyPegen_make_arguments ( p , NULL , a , NULL , b , c ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_parameters[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_slash_with_default lambda_param_with_default* lambda_star_etc?")); } { // lambda_param_no_default+ lambda_param_with_default* lambda_star_etc? 
if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default+ lambda_param_with_default* lambda_star_etc?")); asdl_seq * a; asdl_seq * b; void *c; @@ -5893,19 +7128,25 @@ lambda_parameters_rule(Parser *p) (c = lambda_star_etc_rule(p), 1) // lambda_star_etc? ) { + D(fprintf(stderr, "%*c+ lambda_parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default+ lambda_param_with_default* lambda_star_etc?")); _res = _PyPegen_make_arguments ( p , NULL , NULL , a , b , c ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_parameters[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default+ lambda_param_with_default* lambda_star_etc?")); } { // lambda_param_with_default+ lambda_star_etc? if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+ lambda_star_etc?")); asdl_seq * a; void *b; if ( @@ -5914,35 +7155,46 @@ lambda_parameters_rule(Parser *p) (b = lambda_star_etc_rule(p), 1) // lambda_star_etc? ) { + D(fprintf(stderr, "%*c+ lambda_parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+ lambda_star_etc?")); _res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , a , b ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_parameters[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_with_default+ lambda_star_etc?")); } { // lambda_star_etc if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_star_etc")); StarEtc* a; if ( (a = lambda_star_etc_rule(p)) // lambda_star_etc ) { + D(fprintf(stderr, "%*c+ lambda_parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_star_etc")); _res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , NULL , a ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_parameters[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_star_etc")); } _res = NULL; done: + D(p->level--); return _res; } @@ -5952,15 +7204,19 @@ lambda_parameters_rule(Parser *p) static asdl_seq* lambda_slash_no_default_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq* _res = NULL; int _mark = p->mark; { // lambda_param_no_default+ '/' ',' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> lambda_slash_no_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default+ '/' ','")); Token * _literal; Token * _literal_1; asdl_seq * a; @@ -5972,19 +7228,25 @@ lambda_slash_no_default_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' ) { + D(fprintf(stderr, "%*c+ lambda_slash_no_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default+ '/' ','")); _res = a; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_slash_no_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default+ '/' ','")); } { // lambda_param_no_default+ '/' &':' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> lambda_slash_no_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default+ '/' &':'")); Token * _literal; asdl_seq * a; if ( @@ -5995,17 +7257,22 @@ lambda_slash_no_default_rule(Parser *p) _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' ) { + D(fprintf(stderr, "%*c+ lambda_slash_no_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default+ '/' &':'")); _res = a; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_slash_no_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default+ '/' &':'")); } _res = NULL; done: + D(p->level--); return _res; } @@ -6015,15 +7282,19 @@ lambda_slash_no_default_rule(Parser *p) static SlashWithDefault* lambda_slash_with_default_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } SlashWithDefault* _res = NULL; int _mark = p->mark; { // lambda_param_no_default* lambda_param_with_default+ '/' ',' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> lambda_slash_with_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default* lambda_param_with_default+ '/' ','")); Token * _literal; Token * _literal_1; asdl_seq * a; @@ -6038,19 +7309,25 @@ lambda_slash_with_default_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' ) { + D(fprintf(stderr, "%*c+ lambda_slash_with_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default* lambda_param_with_default+ '/' ','")); _res = _PyPegen_slash_with_default ( p , a , b ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator 
= 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_slash_with_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default* lambda_param_with_default+ '/' ','")); } { // lambda_param_no_default* lambda_param_with_default+ '/' &':' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> lambda_slash_with_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default* lambda_param_with_default+ '/' &':'")); Token * _literal; asdl_seq * a; asdl_seq * b; @@ -6064,17 +7341,22 @@ lambda_slash_with_default_rule(Parser *p) _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' ) { + D(fprintf(stderr, "%*c+ lambda_slash_with_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default* lambda_param_with_default+ '/' &':'")); _res = _PyPegen_slash_with_default ( p , a , b ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_slash_with_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default* lambda_param_with_default+ '/' &':'")); } _res = NULL; done: + D(p->level--); return _res; } @@ -6086,15 +7368,19 @@ lambda_slash_with_default_rule(Parser *p) static StarEtc* lambda_star_etc_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } StarEtc* _res = NULL; int _mark = p->mark; { // '*' lambda_param_no_default lambda_param_maybe_default* lambda_kwds? 
if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' lambda_param_no_default lambda_param_maybe_default* lambda_kwds?")); Token * _literal; arg_ty a; asdl_seq * b; @@ -6109,19 +7395,25 @@ lambda_star_etc_rule(Parser *p) (c = lambda_kwds_rule(p), 1) // lambda_kwds? ) { + D(fprintf(stderr, "%*c+ lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' lambda_param_no_default lambda_param_maybe_default* lambda_kwds?")); _res = _PyPegen_star_etc ( p , a , b , c ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_star_etc[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' lambda_param_no_default lambda_param_maybe_default* lambda_kwds?")); } { // '*' ',' lambda_param_maybe_default+ lambda_kwds? if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' ',' lambda_param_maybe_default+ lambda_kwds?")); Token * _literal; Token * _literal_1; asdl_seq * b; @@ -6136,49 +7428,65 @@ lambda_star_etc_rule(Parser *p) (c = lambda_kwds_rule(p), 1) // lambda_kwds? ) { + D(fprintf(stderr, "%*c+ lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' ',' lambda_param_maybe_default+ lambda_kwds?")); _res = _PyPegen_star_etc ( p , NULL , b , c ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_star_etc[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'*' ',' lambda_param_maybe_default+ lambda_kwds?")); } { // lambda_kwds if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_kwds")); arg_ty a; if ( (a = lambda_kwds_rule(p)) // lambda_kwds ) { + D(fprintf(stderr, "%*c+ lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_kwds")); _res = _PyPegen_star_etc ( p , NULL , NULL , a ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_star_etc[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_kwds")); } { // invalid_lambda_star_etc if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_lambda_star_etc")); void *invalid_lambda_star_etc_var; if ( (invalid_lambda_star_etc_var = invalid_lambda_star_etc_rule(p)) // invalid_lambda_star_etc ) { + D(fprintf(stderr, "%*c+ lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_lambda_star_etc")); _res = invalid_lambda_star_etc_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_star_etc[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "invalid_lambda_star_etc")); } _res = NULL; done: + D(p->level--); return _res; } @@ -6186,15 +7494,19 @@ lambda_star_etc_rule(Parser *p) static arg_ty lambda_kwds_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } arg_ty _res = NULL; int _mark = p->mark; { // '**' lambda_param_no_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> lambda_kwds[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**' lambda_param_no_default")); Token * _literal; arg_ty a; if ( @@ -6203,17 +7515,22 @@ lambda_kwds_rule(Parser *p) (a = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { + D(fprintf(stderr, "%*c+ lambda_kwds[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**' lambda_param_no_default")); _res = a; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_kwds[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'**' lambda_param_no_default")); } _res = NULL; done: + D(p->level--); return _res; } @@ -6221,15 +7538,19 @@ lambda_kwds_rule(Parser *p) static arg_ty lambda_param_no_default_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } arg_ty _res = NULL; int _mark = p->mark; { // lambda_param ',' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> lambda_param_no_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param ','")); Token * _literal; arg_ty a; if ( @@ -6238,19 +7559,25 @@ lambda_param_no_default_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { + D(fprintf(stderr, "%*c+ lambda_param_no_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param ','")); _res = a; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_param_no_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param ','")); } { // lambda_param &':' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> lambda_param_no_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param &':'")); arg_ty a; if ( (a = lambda_param_rule(p)) // lambda_param @@ -6258,17 +7585,22 @@ lambda_param_no_default_rule(Parser *p) _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' ) { + D(fprintf(stderr, "%*c+ lambda_param_no_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param &':'")); _res = a; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_param_no_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param &':'")); } _res = NULL; done: + D(p->level--); return _res; } @@ -6276,15 +7608,19 @@ lambda_param_no_default_rule(Parser *p) static NameDefaultPair* lambda_param_with_default_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } NameDefaultPair* _res = NULL; int _mark = p->mark; { // lambda_param default ',' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> lambda_param_with_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param default ','")); Token * _literal; arg_ty a; expr_ty c; @@ -6296,19 +7632,25 @@ lambda_param_with_default_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { + D(fprintf(stderr, "%*c+ lambda_param_with_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param default ','")); _res = _PyPegen_name_default_pair ( p , a , c , NULL ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_param_with_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param default ','")); } { // lambda_param default &':' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> lambda_param_with_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param default &':'")); arg_ty a; expr_ty c; if ( @@ -6319,17 +7661,22 @@ lambda_param_with_default_rule(Parser *p) _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' ) { + D(fprintf(stderr, "%*c+ lambda_param_with_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param default &':'")); _res = _PyPegen_name_default_pair ( p , a , c , NULL ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_param_with_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param default &':'")); } _res = NULL; done: + D(p->level--); return _res; } @@ -6337,15 +7684,19 @@ lambda_param_with_default_rule(Parser *p) static NameDefaultPair* lambda_param_maybe_default_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } NameDefaultPair* _res = NULL; int _mark = p->mark; { // lambda_param default? ',' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> lambda_param_maybe_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param default? ','")); Token * _literal; arg_ty a; void *c; @@ -6357,19 +7708,25 @@ lambda_param_maybe_default_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { + D(fprintf(stderr, "%*c+ lambda_param_maybe_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param default? 
','")); _res = _PyPegen_name_default_pair ( p , a , c , NULL ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_param_maybe_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param default? ','")); } { // lambda_param default? &':' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> lambda_param_maybe_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param default? &':'")); arg_ty a; void *c; if ( @@ -6380,17 +7737,22 @@ lambda_param_maybe_default_rule(Parser *p) _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' ) { + D(fprintf(stderr, "%*c+ lambda_param_maybe_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param default? &':'")); _res = _PyPegen_name_default_pair ( p , a , c , NULL ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_param_maybe_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param default? 
&':'")); } _res = NULL; done: + D(p->level--); return _res; } @@ -6398,13 +7760,16 @@ lambda_param_maybe_default_rule(Parser *p) static arg_ty lambda_param_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } arg_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -6413,15 +7778,19 @@ lambda_param_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> lambda_param[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME")); expr_ty a; if ( (a = _PyPegen_name_token(p)) // NAME ) { + D(fprintf(stderr, "%*c+ lambda_param[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -6431,14 +7800,18 @@ lambda_param_rule(Parser *p) _res = _Py_arg ( a -> v . Name . id , NULL , NULL , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_param[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NAME")); } _res = NULL; done: + D(p->level--); return _res; } @@ -6446,15 +7819,20 @@ lambda_param_rule(Parser *p) static expr_ty disjunction_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, disjunction_type, &_res)) + if (_PyPegen_is_memoized(p, disjunction_type, &_res)) { + D(p->level--); return _res; + } int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -6463,8 +7841,10 @@ disjunction_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // conjunction (('or' conjunction))+ if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> disjunction[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "conjunction (('or' conjunction))+")); expr_ty a; asdl_seq * b; if ( @@ -6473,8 +7853,10 @@ disjunction_rule(Parser *p) (b = _loop1_89_rule(p)) // (('or' conjunction))+ ) { + D(fprintf(stderr, "%*c+ disjunction[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "conjunction (('or' conjunction))+")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -6484,29 +7866,38 @@ disjunction_rule(Parser *p) _res = _Py_BoolOp ( Or , CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s disjunction[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "conjunction (('or' conjunction))+")); } { // conjunction if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> disjunction[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "conjunction")); expr_ty conjunction_var; if ( (conjunction_var = conjunction_rule(p)) // conjunction ) { + D(fprintf(stderr, "%*c+ disjunction[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "conjunction")); _res = conjunction_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s disjunction[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "conjunction")); } _res = NULL; done: _PyPegen_insert_memo(p, _mark, disjunction_type, _res); + D(p->level--); return _res; } @@ -6514,15 +7905,20 @@ disjunction_rule(Parser *p) static expr_ty conjunction_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, conjunction_type, &_res)) + if (_PyPegen_is_memoized(p, conjunction_type, &_res)) { + D(p->level--); return _res; + } int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -6531,8 +7927,10 @@ conjunction_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // inversion (('and' inversion))+ if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> conjunction[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "inversion (('and' inversion))+")); expr_ty a; asdl_seq * b; if ( @@ -6541,8 +7939,10 @@ conjunction_rule(Parser *p) (b = _loop1_90_rule(p)) // (('and' inversion))+ ) { + D(fprintf(stderr, "%*c+ conjunction[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "inversion (('and' inversion))+")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = 
_token->end_lineno; @@ -6552,29 +7952,38 @@ conjunction_rule(Parser *p) _res = _Py_BoolOp ( And , CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s conjunction[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "inversion (('and' inversion))+")); } { // inversion if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> conjunction[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "inversion")); expr_ty inversion_var; if ( (inversion_var = inversion_rule(p)) // inversion ) { + D(fprintf(stderr, "%*c+ conjunction[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "inversion")); _res = inversion_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s conjunction[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "inversion")); } _res = NULL; done: _PyPegen_insert_memo(p, _mark, conjunction_type, _res); + D(p->level--); return _res; } @@ -6582,15 +7991,20 @@ conjunction_rule(Parser *p) static expr_ty inversion_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, inversion_type, &_res)) + if (_PyPegen_is_memoized(p, inversion_type, &_res)) { + D(p->level--); return _res; + } int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -6599,8 +8013,10 @@ inversion_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'not' inversion if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> inversion[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'not' inversion")); Token * _keyword; expr_ty a; if ( @@ -6609,8 +8025,10 @@ 
inversion_rule(Parser *p) (a = inversion_rule(p)) // inversion ) { + D(fprintf(stderr, "%*c+ inversion[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'not' inversion")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -6620,29 +8038,38 @@ inversion_rule(Parser *p) _res = _Py_UnaryOp ( Not , a , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s inversion[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'not' inversion")); } { // comparison if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> inversion[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "comparison")); expr_ty comparison_var; if ( (comparison_var = comparison_rule(p)) // comparison ) { + D(fprintf(stderr, "%*c+ inversion[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "comparison")); _res = comparison_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s inversion[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "comparison")); } _res = NULL; done: _PyPegen_insert_memo(p, _mark, inversion_type, _res); + D(p->level--); return _res; } @@ -6650,13 +8077,16 @@ inversion_rule(Parser *p) static expr_ty comparison_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -6665,8 +8095,10 @@ comparison_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // bitwise_or compare_op_bitwise_or_pair+ if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> comparison[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "bitwise_or compare_op_bitwise_or_pair+")); expr_ty a; asdl_seq * b; if ( @@ -6675,8 +8107,10 @@ comparison_rule(Parser *p) (b = _loop1_91_rule(p)) // compare_op_bitwise_or_pair+ ) { + D(fprintf(stderr, "%*c+ comparison[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "bitwise_or compare_op_bitwise_or_pair+")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -6686,28 +8120,37 @@ comparison_rule(Parser *p) _res = _Py_Compare ( a , CHECK ( _PyPegen_get_cmpops ( p , b ) ) , CHECK ( _PyPegen_get_exprs ( p , b ) ) , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s comparison[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "bitwise_or compare_op_bitwise_or_pair+")); } { // bitwise_or if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> comparison[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "bitwise_or")); expr_ty bitwise_or_var; if ( (bitwise_or_var = bitwise_or_rule(p)) // bitwise_or ) { + D(fprintf(stderr, "%*c+ comparison[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "bitwise_or")); _res = bitwise_or_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s comparison[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "bitwise_or")); } _res = NULL; done: + D(p->level--); return _res; } @@ -6725,153 +8168,206 @@ comparison_rule(Parser *p) static CmpopExprPair* compare_op_bitwise_or_pair_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } CmpopExprPair* _res = NULL; int _mark = p->mark; { // eq_bitwise_or if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> compare_op_bitwise_or_pair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "eq_bitwise_or")); CmpopExprPair* eq_bitwise_or_var; if ( (eq_bitwise_or_var = eq_bitwise_or_rule(p)) // eq_bitwise_or ) { + D(fprintf(stderr, "%*c+ compare_op_bitwise_or_pair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "eq_bitwise_or")); _res = eq_bitwise_or_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s compare_op_bitwise_or_pair[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "eq_bitwise_or")); } { // noteq_bitwise_or if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> compare_op_bitwise_or_pair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "noteq_bitwise_or")); CmpopExprPair* noteq_bitwise_or_var; if ( (noteq_bitwise_or_var = noteq_bitwise_or_rule(p)) // noteq_bitwise_or ) { + D(fprintf(stderr, "%*c+ compare_op_bitwise_or_pair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "noteq_bitwise_or")); _res = noteq_bitwise_or_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s compare_op_bitwise_or_pair[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "noteq_bitwise_or")); } { // lte_bitwise_or if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> compare_op_bitwise_or_pair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lte_bitwise_or")); CmpopExprPair* lte_bitwise_or_var; if ( (lte_bitwise_or_var = lte_bitwise_or_rule(p)) // lte_bitwise_or ) { + D(fprintf(stderr, "%*c+ compare_op_bitwise_or_pair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lte_bitwise_or")); _res = lte_bitwise_or_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s compare_op_bitwise_or_pair[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lte_bitwise_or")); } { // lt_bitwise_or if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> compare_op_bitwise_or_pair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lt_bitwise_or")); CmpopExprPair* lt_bitwise_or_var; if ( (lt_bitwise_or_var = lt_bitwise_or_rule(p)) // lt_bitwise_or ) { + D(fprintf(stderr, "%*c+ compare_op_bitwise_or_pair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lt_bitwise_or")); _res = lt_bitwise_or_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s compare_op_bitwise_or_pair[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lt_bitwise_or")); } { // gte_bitwise_or if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> compare_op_bitwise_or_pair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "gte_bitwise_or")); CmpopExprPair* gte_bitwise_or_var; if ( (gte_bitwise_or_var = gte_bitwise_or_rule(p)) // gte_bitwise_or ) { + D(fprintf(stderr, "%*c+ compare_op_bitwise_or_pair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "gte_bitwise_or")); _res = gte_bitwise_or_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s compare_op_bitwise_or_pair[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "gte_bitwise_or")); } { // gt_bitwise_or if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> compare_op_bitwise_or_pair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "gt_bitwise_or")); CmpopExprPair* gt_bitwise_or_var; if ( (gt_bitwise_or_var = gt_bitwise_or_rule(p)) // gt_bitwise_or ) { + D(fprintf(stderr, "%*c+ compare_op_bitwise_or_pair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "gt_bitwise_or")); _res = gt_bitwise_or_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s compare_op_bitwise_or_pair[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "gt_bitwise_or")); } { // notin_bitwise_or if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> compare_op_bitwise_or_pair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "notin_bitwise_or")); CmpopExprPair* notin_bitwise_or_var; if ( (notin_bitwise_or_var = notin_bitwise_or_rule(p)) // notin_bitwise_or ) { + D(fprintf(stderr, "%*c+ compare_op_bitwise_or_pair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "notin_bitwise_or")); _res = notin_bitwise_or_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s compare_op_bitwise_or_pair[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "notin_bitwise_or")); } { // in_bitwise_or if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> compare_op_bitwise_or_pair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "in_bitwise_or")); CmpopExprPair* in_bitwise_or_var; if ( (in_bitwise_or_var = in_bitwise_or_rule(p)) // in_bitwise_or ) { + D(fprintf(stderr, "%*c+ compare_op_bitwise_or_pair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "in_bitwise_or")); _res = in_bitwise_or_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s compare_op_bitwise_or_pair[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "in_bitwise_or")); } { // isnot_bitwise_or if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> compare_op_bitwise_or_pair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "isnot_bitwise_or")); CmpopExprPair* isnot_bitwise_or_var; if ( (isnot_bitwise_or_var = isnot_bitwise_or_rule(p)) // isnot_bitwise_or ) { + D(fprintf(stderr, "%*c+ compare_op_bitwise_or_pair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "isnot_bitwise_or")); _res = isnot_bitwise_or_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s compare_op_bitwise_or_pair[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "isnot_bitwise_or")); } { // is_bitwise_or if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> compare_op_bitwise_or_pair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "is_bitwise_or")); CmpopExprPair* is_bitwise_or_var; if ( (is_bitwise_or_var = is_bitwise_or_rule(p)) // is_bitwise_or ) { + D(fprintf(stderr, "%*c+ compare_op_bitwise_or_pair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "is_bitwise_or")); _res = is_bitwise_or_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s compare_op_bitwise_or_pair[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "is_bitwise_or")); } _res = NULL; done: + D(p->level--); return _res; } @@ -6879,15 +8375,19 @@ compare_op_bitwise_or_pair_rule(Parser *p) static CmpopExprPair* eq_bitwise_or_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } CmpopExprPair* _res = NULL; int _mark = p->mark; { // '==' bitwise_or if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> eq_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'==' bitwise_or")); Token * _literal; expr_ty a; if ( @@ -6896,17 +8396,22 @@ eq_bitwise_or_rule(Parser *p) (a = bitwise_or_rule(p)) // bitwise_or ) { + D(fprintf(stderr, "%*c+ eq_bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'==' bitwise_or")); _res = _PyPegen_cmpop_expr_pair ( p , Eq , a ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s eq_bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'==' bitwise_or")); } _res = NULL; done: + D(p->level--); return _res; } @@ -6914,15 +8419,19 @@ eq_bitwise_or_rule(Parser *p) static CmpopExprPair* noteq_bitwise_or_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } CmpopExprPair* _res = NULL; int _mark = p->mark; { // ('!=') bitwise_or if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> noteq_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('!=') bitwise_or")); void *_tmp_92_var; expr_ty a; if ( @@ -6931,17 +8440,22 @@ noteq_bitwise_or_rule(Parser *p) (a = bitwise_or_rule(p)) // bitwise_or ) { + D(fprintf(stderr, "%*c+ noteq_bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "('!=') bitwise_or")); _res = _PyPegen_cmpop_expr_pair ( p , NotEq , a ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s noteq_bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "('!=') bitwise_or")); } _res = NULL; done: + D(p->level--); return _res; } @@ -6949,15 +8463,19 @@ noteq_bitwise_or_rule(Parser *p) static CmpopExprPair* lte_bitwise_or_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } CmpopExprPair* _res = NULL; int _mark = p->mark; { // '<=' bitwise_or if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> lte_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'<=' bitwise_or")); Token * _literal; expr_ty a; if ( @@ -6966,17 +8484,22 @@ lte_bitwise_or_rule(Parser *p) (a = bitwise_or_rule(p)) // bitwise_or ) { + D(fprintf(stderr, "%*c+ lte_bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'<=' bitwise_or")); _res = _PyPegen_cmpop_expr_pair ( p , LtE , a ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s lte_bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'<=' bitwise_or")); } _res = NULL; done: + D(p->level--); return _res; } @@ -6984,15 +8507,19 @@ lte_bitwise_or_rule(Parser *p) static CmpopExprPair* lt_bitwise_or_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } CmpopExprPair* _res = NULL; int _mark = p->mark; { // '<' bitwise_or if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> lt_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'<' bitwise_or")); Token * _literal; expr_ty a; if ( @@ -7001,17 +8528,22 @@ lt_bitwise_or_rule(Parser *p) (a = bitwise_or_rule(p)) // bitwise_or ) { + D(fprintf(stderr, "%*c+ lt_bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'<' bitwise_or")); _res = _PyPegen_cmpop_expr_pair ( p , Lt , a ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s lt_bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'<' bitwise_or")); } _res = NULL; done: + D(p->level--); return _res; } @@ -7019,15 +8551,19 @@ lt_bitwise_or_rule(Parser *p) static CmpopExprPair* gte_bitwise_or_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } CmpopExprPair* _res = NULL; int _mark = p->mark; { // '>=' bitwise_or if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> gte_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'>=' bitwise_or")); Token * _literal; expr_ty a; if ( @@ -7036,17 +8572,22 @@ gte_bitwise_or_rule(Parser *p) (a = bitwise_or_rule(p)) // bitwise_or ) { + D(fprintf(stderr, "%*c+ gte_bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'>=' bitwise_or")); _res = _PyPegen_cmpop_expr_pair ( p , GtE , a ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s gte_bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'>=' bitwise_or")); } _res = NULL; done: + D(p->level--); return _res; } @@ -7054,15 +8595,19 @@ gte_bitwise_or_rule(Parser *p) static CmpopExprPair* gt_bitwise_or_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } CmpopExprPair* _res = NULL; int _mark = p->mark; { // '>' bitwise_or if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> gt_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'>' bitwise_or")); Token * _literal; expr_ty a; if ( @@ -7071,17 +8616,22 @@ gt_bitwise_or_rule(Parser *p) (a = bitwise_or_rule(p)) // bitwise_or ) { + D(fprintf(stderr, "%*c+ gt_bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'>' bitwise_or")); _res = _PyPegen_cmpop_expr_pair ( p , Gt , a ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s gt_bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'>' bitwise_or")); } _res = NULL; done: + D(p->level--); return _res; } @@ -7089,15 +8639,19 @@ gt_bitwise_or_rule(Parser *p) static CmpopExprPair* notin_bitwise_or_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } CmpopExprPair* _res = NULL; int _mark = p->mark; { // 'not' 'in' bitwise_or if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> notin_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'not' 'in' bitwise_or")); Token * _keyword; Token * _keyword_1; expr_ty a; @@ -7109,17 +8663,22 @@ notin_bitwise_or_rule(Parser *p) (a = bitwise_or_rule(p)) // bitwise_or ) { + D(fprintf(stderr, "%*c+ notin_bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'not' 'in' bitwise_or")); _res = _PyPegen_cmpop_expr_pair ( p , NotIn , a ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s notin_bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'not' 'in' bitwise_or")); } _res = NULL; done: + D(p->level--); return _res; } @@ -7127,15 +8686,19 @@ notin_bitwise_or_rule(Parser *p) static CmpopExprPair* in_bitwise_or_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } CmpopExprPair* _res = NULL; int _mark = p->mark; { // 'in' bitwise_or if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> in_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'in' bitwise_or")); Token * _keyword; expr_ty a; if ( @@ -7144,17 +8707,22 @@ in_bitwise_or_rule(Parser *p) (a = bitwise_or_rule(p)) // bitwise_or ) { + D(fprintf(stderr, "%*c+ in_bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'in' bitwise_or")); _res = _PyPegen_cmpop_expr_pair ( p , In , a ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s in_bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'in' bitwise_or")); } _res = NULL; done: + D(p->level--); return _res; } @@ -7162,15 +8730,19 @@ in_bitwise_or_rule(Parser *p) static CmpopExprPair* isnot_bitwise_or_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } CmpopExprPair* _res = NULL; int _mark = p->mark; { // 'is' 'not' bitwise_or if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> isnot_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'is' 'not' bitwise_or")); Token * _keyword; Token * _keyword_1; expr_ty a; @@ -7182,17 +8754,22 @@ isnot_bitwise_or_rule(Parser *p) (a = bitwise_or_rule(p)) // bitwise_or ) { + D(fprintf(stderr, "%*c+ isnot_bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'is' 'not' bitwise_or")); _res = _PyPegen_cmpop_expr_pair ( p , IsNot , a ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s isnot_bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'is' 'not' bitwise_or")); } _res = NULL; done: + D(p->level--); return _res; } @@ -7200,15 +8777,19 @@ isnot_bitwise_or_rule(Parser *p) static CmpopExprPair* is_bitwise_or_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } CmpopExprPair* _res = NULL; int _mark = p->mark; { // 'is' bitwise_or if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> is_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'is' bitwise_or")); Token * _keyword; expr_ty a; if ( @@ -7217,17 +8798,22 @@ is_bitwise_or_rule(Parser *p) (a = bitwise_or_rule(p)) // bitwise_or ) { + D(fprintf(stderr, "%*c+ is_bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'is' bitwise_or")); _res = _PyPegen_cmpop_expr_pair ( p , Is , a ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s is_bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'is' bitwise_or")); } _res = NULL; done: + D(p->level--); return _res; } @@ -7237,14 +8823,18 @@ static expr_ty bitwise_or_raw(Parser *); static expr_ty bitwise_or_rule(Parser *p) { + D(p->level++); expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, bitwise_or_type, &_res)) + if (_PyPegen_is_memoized(p, bitwise_or_type, &_res)) { + D(p->level--); return _res; + } int _mark = p->mark; int _resmark = p->mark; while (1) { int tmpvar_1 = _PyPegen_update_memo(p, _mark, bitwise_or_type, _res); if (tmpvar_1) { + D(p->level--); return _res; } p->mark = _mark; @@ -7255,18 +8845,22 @@ bitwise_or_rule(Parser *p) _res = _raw; } p->mark = _resmark; + D(p->level--); return _res; } static expr_ty bitwise_or_raw(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -7275,8 +8869,10 @@ bitwise_or_raw(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // bitwise_or '|' bitwise_xor if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "bitwise_or '|' bitwise_xor")); Token * _literal; expr_ty a; expr_ty b; @@ -7288,8 +8884,10 @@ bitwise_or_raw(Parser *p) (b = bitwise_xor_rule(p)) // bitwise_xor ) { + D(fprintf(stderr, "%*c+ bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "bitwise_or '|' bitwise_xor")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -7299,28 +8897,37 @@ bitwise_or_raw(Parser *p) _res = _Py_BinOp ( a , BitOr , b , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s 
bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "bitwise_or '|' bitwise_xor")); } { // bitwise_xor if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "bitwise_xor")); expr_ty bitwise_xor_var; if ( (bitwise_xor_var = bitwise_xor_rule(p)) // bitwise_xor ) { + D(fprintf(stderr, "%*c+ bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "bitwise_xor")); _res = bitwise_xor_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "bitwise_xor")); } _res = NULL; done: + D(p->level--); return _res; } @@ -7330,14 +8937,18 @@ static expr_ty bitwise_xor_raw(Parser *); static expr_ty bitwise_xor_rule(Parser *p) { + D(p->level++); expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, bitwise_xor_type, &_res)) + if (_PyPegen_is_memoized(p, bitwise_xor_type, &_res)) { + D(p->level--); return _res; + } int _mark = p->mark; int _resmark = p->mark; while (1) { int tmpvar_2 = _PyPegen_update_memo(p, _mark, bitwise_xor_type, _res); if (tmpvar_2) { + D(p->level--); return _res; } p->mark = _mark; @@ -7348,18 +8959,22 @@ bitwise_xor_rule(Parser *p) _res = _raw; } p->mark = _resmark; + D(p->level--); return _res; } static expr_ty bitwise_xor_raw(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -7368,8 +8983,10 @@ bitwise_xor_raw(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // bitwise_xor '^' bitwise_and if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> bitwise_xor[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "bitwise_xor '^' 
bitwise_and")); Token * _literal; expr_ty a; expr_ty b; @@ -7381,8 +8998,10 @@ bitwise_xor_raw(Parser *p) (b = bitwise_and_rule(p)) // bitwise_and ) { + D(fprintf(stderr, "%*c+ bitwise_xor[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "bitwise_xor '^' bitwise_and")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -7392,28 +9011,37 @@ bitwise_xor_raw(Parser *p) _res = _Py_BinOp ( a , BitXor , b , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s bitwise_xor[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "bitwise_xor '^' bitwise_and")); } { // bitwise_and if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> bitwise_xor[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "bitwise_and")); expr_ty bitwise_and_var; if ( (bitwise_and_var = bitwise_and_rule(p)) // bitwise_and ) { + D(fprintf(stderr, "%*c+ bitwise_xor[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "bitwise_and")); _res = bitwise_and_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s bitwise_xor[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "bitwise_and")); } _res = NULL; done: + D(p->level--); return _res; } @@ -7423,14 +9051,18 @@ static expr_ty bitwise_and_raw(Parser *); static expr_ty bitwise_and_rule(Parser *p) { + D(p->level++); expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, bitwise_and_type, &_res)) + if (_PyPegen_is_memoized(p, bitwise_and_type, &_res)) { + D(p->level--); return _res; + } int _mark = p->mark; int _resmark = p->mark; while (1) { int tmpvar_3 = _PyPegen_update_memo(p, _mark, bitwise_and_type, _res); if (tmpvar_3) { + D(p->level--); return _res; } p->mark = _mark; @@ -7441,18 +9073,22 @@ bitwise_and_rule(Parser *p) _res = _raw; } p->mark = _resmark; + D(p->level--); return _res; } static expr_ty bitwise_and_raw(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -7461,8 +9097,10 @@ bitwise_and_raw(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // bitwise_and '&' shift_expr if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> bitwise_and[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "bitwise_and '&' shift_expr")); Token * _literal; expr_ty a; expr_ty b; @@ -7474,8 +9112,10 @@ bitwise_and_raw(Parser *p) (b = shift_expr_rule(p)) // shift_expr ) { + D(fprintf(stderr, "%*c+ bitwise_and[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "bitwise_and '&' shift_expr")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -7485,28 +9125,37 @@ bitwise_and_raw(Parser *p) _res = _Py_BinOp ( a , BitAnd , b , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s 
bitwise_and[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "bitwise_and '&' shift_expr")); } { // shift_expr if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> bitwise_and[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "shift_expr")); expr_ty shift_expr_var; if ( (shift_expr_var = shift_expr_rule(p)) // shift_expr ) { + D(fprintf(stderr, "%*c+ bitwise_and[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "shift_expr")); _res = shift_expr_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s bitwise_and[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "shift_expr")); } _res = NULL; done: + D(p->level--); return _res; } @@ -7516,14 +9165,18 @@ static expr_ty shift_expr_raw(Parser *); static expr_ty shift_expr_rule(Parser *p) { + D(p->level++); expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, shift_expr_type, &_res)) + if (_PyPegen_is_memoized(p, shift_expr_type, &_res)) { + D(p->level--); return _res; + } int _mark = p->mark; int _resmark = p->mark; while (1) { int tmpvar_4 = _PyPegen_update_memo(p, _mark, shift_expr_type, _res); if (tmpvar_4) { + D(p->level--); return _res; } p->mark = _mark; @@ -7534,18 +9187,22 @@ shift_expr_rule(Parser *p) _res = _raw; } p->mark = _resmark; + D(p->level--); return _res; } static expr_ty shift_expr_raw(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -7554,8 +9211,10 @@ shift_expr_raw(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // shift_expr '<<' sum if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> shift_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "shift_expr '<<' sum")); Token * _literal; 
expr_ty a; expr_ty b; @@ -7567,8 +9226,10 @@ shift_expr_raw(Parser *p) (b = sum_rule(p)) // sum ) { + D(fprintf(stderr, "%*c+ shift_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "shift_expr '<<' sum")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -7578,16 +9239,21 @@ shift_expr_raw(Parser *p) _res = _Py_BinOp ( a , LShift , b , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s shift_expr[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "shift_expr '<<' sum")); } { // shift_expr '>>' sum if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> shift_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "shift_expr '>>' sum")); Token * _literal; expr_ty a; expr_ty b; @@ -7599,8 +9265,10 @@ shift_expr_raw(Parser *p) (b = sum_rule(p)) // sum ) { + D(fprintf(stderr, "%*c+ shift_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "shift_expr '>>' sum")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -7610,28 +9278,37 @@ shift_expr_raw(Parser *p) _res = _Py_BinOp ( a , RShift , b , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s shift_expr[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "shift_expr '>>' sum")); } { // sum if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> shift_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "sum")); expr_ty sum_var; if ( (sum_var = sum_rule(p)) // sum ) { + D(fprintf(stderr, "%*c+ shift_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "sum")); _res = sum_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s shift_expr[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "sum")); } _res = NULL; done: + D(p->level--); return _res; } @@ -7641,14 +9318,18 @@ static expr_ty sum_raw(Parser *); static expr_ty sum_rule(Parser *p) { + D(p->level++); expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, sum_type, &_res)) + if (_PyPegen_is_memoized(p, sum_type, &_res)) { + D(p->level--); return _res; + } int _mark = p->mark; int _resmark = p->mark; while (1) { int tmpvar_5 = _PyPegen_update_memo(p, _mark, sum_type, _res); if (tmpvar_5) { + D(p->level--); return _res; } p->mark = _mark; @@ -7659,18 +9340,22 @@ sum_rule(Parser *p) _res = _raw; } p->mark = _resmark; + D(p->level--); return _res; } static expr_ty sum_raw(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -7679,8 +9364,10 @@ sum_raw(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // sum '+' term if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> sum[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "sum '+' term")); Token * _literal; expr_ty a; expr_ty b; @@ -7692,8 +9379,10 @@ sum_raw(Parser *p) (b = term_rule(p)) // term ) { + D(fprintf(stderr, "%*c+ sum[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "sum '+' term")); Token *_token = 
_PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -7703,16 +9392,21 @@ sum_raw(Parser *p) _res = _Py_BinOp ( a , Add , b , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s sum[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "sum '+' term")); } { // sum '-' term if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> sum[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "sum '-' term")); Token * _literal; expr_ty a; expr_ty b; @@ -7724,8 +9418,10 @@ sum_raw(Parser *p) (b = term_rule(p)) // term ) { + D(fprintf(stderr, "%*c+ sum[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "sum '-' term")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -7735,28 +9431,37 @@ sum_raw(Parser *p) _res = _Py_BinOp ( a , Sub , b , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s sum[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "sum '-' term")); } { // term if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> sum[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "term")); expr_ty term_var; if ( (term_var = term_rule(p)) // term ) { + D(fprintf(stderr, "%*c+ sum[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "term")); _res = term_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s sum[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "term")); } _res = NULL; done: + D(p->level--); return _res; } @@ -7772,14 +9477,18 @@ static expr_ty term_raw(Parser *); static expr_ty term_rule(Parser *p) { + D(p->level++); expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, term_type, &_res)) + if (_PyPegen_is_memoized(p, term_type, &_res)) { + D(p->level--); return _res; + } int _mark = p->mark; int _resmark = p->mark; while (1) { int tmpvar_6 = _PyPegen_update_memo(p, _mark, term_type, _res); if (tmpvar_6) { + D(p->level--); return _res; } p->mark = _mark; @@ -7790,18 +9499,22 @@ term_rule(Parser *p) _res = _raw; } p->mark = _resmark; + D(p->level--); return _res; } static expr_ty term_raw(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -7810,8 +9523,10 @@ term_raw(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // term '*' factor if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> term[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "term '*' factor")); Token * _literal; expr_ty a; expr_ty b; @@ -7823,8 +9538,10 @@ term_raw(Parser *p) (b = factor_rule(p)) // factor ) { + D(fprintf(stderr, "%*c+ term[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "term '*' factor")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -7834,16 +9551,21 @@ term_raw(Parser *p) _res = _Py_BinOp ( a , Mult , b , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s term[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "term '*' factor")); } { // term '/' factor if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> term[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "term '/' factor")); Token * _literal; expr_ty a; expr_ty b; @@ -7855,8 +9577,10 @@ term_raw(Parser *p) (b = factor_rule(p)) // factor ) { + D(fprintf(stderr, "%*c+ term[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "term '/' factor")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -7866,16 +9590,21 @@ term_raw(Parser *p) _res = _Py_BinOp ( a , Div , b , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s term[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "term '/' factor")); } { // term '//' factor if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> term[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "term '//' factor")); Token * _literal; expr_ty a; expr_ty b; @@ -7887,8 +9616,10 @@ term_raw(Parser *p) (b = factor_rule(p)) // factor ) { + D(fprintf(stderr, "%*c+ term[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "term '//' factor")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -7898,16 +9629,21 @@ term_raw(Parser *p) _res = _Py_BinOp ( a , FloorDiv , b , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s term[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "term '//' factor")); } { // term '%' factor if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> term[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "term '%' factor")); Token * _literal; expr_ty a; expr_ty b; @@ -7919,8 +9655,10 @@ term_raw(Parser *p) (b = factor_rule(p)) // factor ) { + D(fprintf(stderr, "%*c+ term[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "term '%' factor")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -7930,16 +9668,21 @@ term_raw(Parser *p) _res = _Py_BinOp ( a , Mod , b , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s term[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "term '%' factor")); } { // term '@' factor if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> term[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "term '@' factor")); Token * _literal; expr_ty a; expr_ty b; @@ -7951,8 +9694,10 @@ term_raw(Parser *p) (b = factor_rule(p)) // factor ) { + D(fprintf(stderr, "%*c+ term[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "term '@' factor")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -7962,28 +9707,37 @@ term_raw(Parser *p) _res = CHECK_VERSION ( 5 , "The '@' operator is" , _Py_BinOp ( a , MatMult , b , EXTRA ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s term[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "term '@' factor")); } { // factor if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> term[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "factor")); expr_ty factor_var; if ( (factor_var = factor_rule(p)) // factor ) { + D(fprintf(stderr, "%*c+ term[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "factor")); _res = factor_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s term[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "factor")); } _res = NULL; done: + D(p->level--); return _res; } @@ -7991,15 +9745,20 @@ term_raw(Parser *p) static expr_ty factor_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, factor_type, &_res)) + if (_PyPegen_is_memoized(p, factor_type, &_res)) { + D(p->level--); return _res; + } int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -8008,8 +9767,10 @@ factor_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // '+' factor if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> factor[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'+' factor")); Token * _literal; expr_ty a; if ( @@ -8018,8 +9779,10 @@ factor_rule(Parser *p) (a = factor_rule(p)) // factor ) { + D(fprintf(stderr, "%*c+ factor[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'+' factor")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -8029,16 +9792,21 @@ factor_rule(Parser *p) _res = _Py_UnaryOp ( UAdd , a , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s 
factor[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'+' factor")); } { // '-' factor if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> factor[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'-' factor")); Token * _literal; expr_ty a; if ( @@ -8047,8 +9815,10 @@ factor_rule(Parser *p) (a = factor_rule(p)) // factor ) { + D(fprintf(stderr, "%*c+ factor[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'-' factor")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -8058,16 +9828,21 @@ factor_rule(Parser *p) _res = _Py_UnaryOp ( USub , a , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s factor[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'-' factor")); } { // '~' factor if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> factor[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'~' factor")); Token * _literal; expr_ty a; if ( @@ -8076,8 +9851,10 @@ factor_rule(Parser *p) (a = factor_rule(p)) // factor ) { + D(fprintf(stderr, "%*c+ factor[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'~' factor")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -8087,29 +9864,38 @@ factor_rule(Parser *p) _res = _Py_UnaryOp ( Invert , a , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s factor[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'~' factor")); } { // power if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> factor[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "power")); expr_ty power_var; if ( (power_var = power_rule(p)) // power ) { + D(fprintf(stderr, "%*c+ factor[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "power")); _res = power_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s factor[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "power")); } _res = NULL; done: _PyPegen_insert_memo(p, _mark, factor_type, _res); + D(p->level--); return _res; } @@ -8117,13 +9903,16 @@ factor_rule(Parser *p) static expr_ty power_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -8132,8 +9921,10 @@ power_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // await_primary '**' factor if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> power[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "await_primary '**' factor")); Token * _literal; expr_ty a; expr_ty b; @@ -8145,8 +9936,10 @@ power_rule(Parser *p) (b = factor_rule(p)) // factor ) { + D(fprintf(stderr, "%*c+ power[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "await_primary '**' factor")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -8156,28 +9949,37 @@ power_rule(Parser *p) _res = _Py_BinOp ( a , Pow , b , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s power[%d-%d]: %s failed!\n", p->level, ' ', + 
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "await_primary '**' factor")); } { // await_primary if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> power[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "await_primary")); expr_ty await_primary_var; if ( (await_primary_var = await_primary_rule(p)) // await_primary ) { + D(fprintf(stderr, "%*c+ power[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "await_primary")); _res = await_primary_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s power[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "await_primary")); } _res = NULL; done: + D(p->level--); return _res; } @@ -8185,15 +9987,20 @@ power_rule(Parser *p) static expr_ty await_primary_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, await_primary_type, &_res)) + if (_PyPegen_is_memoized(p, await_primary_type, &_res)) { + D(p->level--); return _res; + } int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -8202,8 +10009,10 @@ await_primary_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // AWAIT primary if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> await_primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "AWAIT primary")); expr_ty a; Token * await_var; if ( @@ -8212,8 +10021,10 @@ await_primary_rule(Parser *p) (a = primary_rule(p)) // primary ) { + D(fprintf(stderr, "%*c+ await_primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "AWAIT primary")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -8223,29 +10034,38 @@ await_primary_rule(Parser *p) _res = CHECK_VERSION 
( 5 , "Await expressions are" , _Py_Await ( a , EXTRA ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s await_primary[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "AWAIT primary")); } { // primary if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> await_primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "primary")); expr_ty primary_var; if ( (primary_var = primary_rule(p)) // primary ) { + D(fprintf(stderr, "%*c+ await_primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "primary")); _res = primary_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s await_primary[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "primary")); } _res = NULL; done: _PyPegen_insert_memo(p, _mark, await_primary_type, _res); + D(p->level--); return _res; } @@ -8260,14 +10080,18 @@ static expr_ty primary_raw(Parser *); static expr_ty primary_rule(Parser *p) { + D(p->level++); expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, primary_type, &_res)) + if (_PyPegen_is_memoized(p, primary_type, &_res)) { + D(p->level--); return _res; + } int _mark = p->mark; int _resmark = p->mark; while (1) { int tmpvar_7 = _PyPegen_update_memo(p, _mark, primary_type, _res); if (tmpvar_7) { + D(p->level--); return _res; } p->mark = _mark; @@ -8278,18 +10102,22 @@ primary_rule(Parser *p) _res = _raw; } p->mark = _resmark; + D(p->level--); return _res; } static expr_ty primary_raw(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -8298,8 +10126,10 @@ primary_raw(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA 
macro { // primary '.' NAME if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "primary '.' NAME")); Token * _literal; expr_ty a; expr_ty b; @@ -8311,8 +10141,10 @@ primary_raw(Parser *p) (b = _PyPegen_name_token(p)) // NAME ) { + D(fprintf(stderr, "%*c+ primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "primary '.' NAME")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -8322,16 +10154,21 @@ primary_raw(Parser *p) _res = _Py_Attribute ( a , b -> v . Name . id , Load , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s primary[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "primary '.' NAME")); } { // primary genexp if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "primary genexp")); expr_ty a; expr_ty b; if ( @@ -8340,8 +10177,10 @@ primary_raw(Parser *p) (b = genexp_rule(p)) // genexp ) { + D(fprintf(stderr, "%*c+ primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "primary genexp")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -8351,16 +10190,21 @@ primary_raw(Parser *p) _res = _Py_Call ( a , CHECK ( _PyPegen_singleton_seq ( p , b ) ) , NULL , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s primary[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "primary genexp")); } { // primary '(' arguments? 
')' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "primary '(' arguments? ')'")); Token * _literal; Token * _literal_1; expr_ty a; @@ -8375,8 +10219,10 @@ primary_raw(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { + D(fprintf(stderr, "%*c+ primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "primary '(' arguments? ')'")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -8386,16 +10232,21 @@ primary_raw(Parser *p) _res = _Py_Call ( a , ( b ) ? ( ( expr_ty ) b ) -> v . Call . args : NULL , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s primary[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "primary '(' arguments? 
')'")); } { // primary '[' slices ']' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "primary '[' slices ']'")); Token * _literal; Token * _literal_1; expr_ty a; @@ -8410,8 +10261,10 @@ primary_raw(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { + D(fprintf(stderr, "%*c+ primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "primary '[' slices ']'")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -8421,28 +10274,37 @@ primary_raw(Parser *p) _res = _Py_Subscript ( a , b , Load , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s primary[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "primary '[' slices ']'")); } { // atom if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "atom")); expr_ty atom_var; if ( (atom_var = atom_rule(p)) // atom ) { + D(fprintf(stderr, "%*c+ primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "atom")); _res = atom_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s primary[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "atom")); } _res = NULL; done: + D(p->level--); return _res; } @@ -8450,13 +10312,16 @@ primary_raw(Parser *p) static expr_ty slices_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -8465,8 +10330,10 @@ slices_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // slice !',' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> slices[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slice !','")); expr_ty a; if ( (a = slice_rule(p)) // slice @@ -8474,19 +10341,25 @@ slices_rule(Parser *p) _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 12) // token=',' ) { + D(fprintf(stderr, "%*c+ slices[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slice !','")); _res = a; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s slices[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slice !','")); } { // ','.slice+ ','? if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> slices[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.slice+ ','?")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; @@ -8496,8 +10369,10 @@ slices_rule(Parser *p) (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) { + D(fprintf(stderr, "%*c+ slices[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.slice+ ','?")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -8507,14 +10382,18 @@ slices_rule(Parser *p) _res = _Py_Tuple ( a , Load , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s slices[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.slice+ ','?")); } _res = NULL; done: + D(p->level--); return _res; } @@ -8522,13 +10401,16 @@ slices_rule(Parser *p) static expr_ty slice_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -8537,8 +10419,10 @@ slice_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // expression? ':' expression? [':' expression?] if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> slice[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression? ':' expression? [':' expression?]")); Token * _literal; void *a; void *b; @@ -8553,8 +10437,10 @@ slice_rule(Parser *p) (c = _tmp_95_rule(p), 1) // [':' expression?] ) { + D(fprintf(stderr, "%*c+ slice[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression? ':' expression? 
[':' expression?]")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -8564,32 +10450,42 @@ slice_rule(Parser *p) _res = _Py_Slice ( a , b , c , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s slice[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression? ':' expression? [':' expression?]")); } { // expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> slice[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression")); expr_ty a; if ( (a = expression_rule(p)) // expression ) { + D(fprintf(stderr, "%*c+ slice[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression")); _res = a; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s slice[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression")); } _res = NULL; done: + D(p->level--); return _res; } @@ -8608,13 +10504,16 @@ slice_rule(Parser *p) static expr_ty atom_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -8623,29 +10522,38 @@ atom_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME")); expr_ty name_var; if ( (name_var = _PyPegen_name_token(p)) // NAME ) { + D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME")); _res = name_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME")); } { // 'True' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 527)) // token='True' ) { + D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -8655,23 +10563,30 @@ atom_rule(Parser *p) _res = _Py_Constant ( Py_True , NULL , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'True'")); } { // 'False' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 528)) // token='False' ) { + D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -8681,23 +10596,30 @@ atom_rule(Parser *p) _res = _Py_Constant ( Py_False , NULL , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'False'")); } { // 'None' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 529)) // token='None' ) { + D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -8707,34 +10629,45 @@ atom_rule(Parser *p) _res = _Py_Constant ( Py_None , NULL , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'None'")); } { // '__new_parser__' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'__new_parser__'")); Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 530)) // token='__new_parser__' ) { + D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'__new_parser__'")); _res = RAISE_SYNTAX_ERROR ( "You found it!" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'__new_parser__'")); } { // &STRING strings if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&STRING strings")); expr_ty strings_var; if ( _PyPegen_lookahead(1, _PyPegen_string_token, p) @@ -8742,29 +10675,39 @@ atom_rule(Parser *p) (strings_var = strings_rule(p)) // strings ) { + D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&STRING strings")); _res = strings_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&STRING strings")); } { // NUMBER if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NUMBER")); expr_ty number_var; if ( (number_var = _PyPegen_number_token(p)) // NUMBER ) { + D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NUMBER")); _res = number_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NUMBER")); } { // &'(' (tuple | group | genexp) if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'(' (tuple | group | genexp)")); void *_tmp_96_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 7) // token='(' @@ -8772,15 +10715,20 @@ atom_rule(Parser *p) (_tmp_96_var = _tmp_96_rule(p)) // tuple | group | genexp ) { + D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'(' (tuple | group | genexp)")); _res = _tmp_96_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&'(' (tuple | group | genexp)")); } { // &'[' (list | listcomp) if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'[' (list | listcomp)")); void *_tmp_97_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 9) // token='[' @@ -8788,15 +10736,20 @@ atom_rule(Parser *p) (_tmp_97_var = _tmp_97_rule(p)) // list | listcomp ) { + D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'[' (list | listcomp)")); _res = _tmp_97_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "&'[' (list | listcomp)")); } { // &'{' (dict | set | dictcomp | setcomp) if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'{' (dict | set | dictcomp | setcomp)")); void *_tmp_98_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 25) // token='{' @@ -8804,22 +10757,29 @@ atom_rule(Parser *p) (_tmp_98_var = _tmp_98_rule(p)) // dict | set | dictcomp | setcomp ) { + D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'{' (dict | set | dictcomp | setcomp)")); _res = _tmp_98_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&'{' (dict | set | dictcomp | setcomp)")); } { // '...' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 52)) // token='...' ) { + D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -8829,14 +10789,18 @@ atom_rule(Parser *p) _res = _Py_Constant ( Py_Ellipsis , NULL , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'...'")); } _res = NULL; done: + D(p->level--); return _res; } @@ -8844,34 +10808,45 @@ atom_rule(Parser *p) static expr_ty strings_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, strings_type, &_res)) + if (_PyPegen_is_memoized(p, strings_type, &_res)) { + D(p->level--); return _res; + } int _mark = p->mark; { // STRING+ if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> strings[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "STRING+")); asdl_seq * a; if ( (a = _loop1_99_rule(p)) // STRING+ ) { + D(fprintf(stderr, "%*c+ strings[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "STRING+")); _res = _PyPegen_concatenate_strings ( p , a ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s strings[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "STRING+")); } _res = NULL; done: _PyPegen_insert_memo(p, _mark, strings_type, _res); + D(p->level--); return _res; } @@ -8879,13 +10854,16 @@ strings_rule(Parser *p) static expr_ty list_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -8894,8 +10872,10 @@ list_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // '[' star_named_expressions? ']' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> list[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'[' star_named_expressions? 
']'")); Token * _literal; Token * _literal_1; void *a; @@ -8907,8 +10887,10 @@ list_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { + D(fprintf(stderr, "%*c+ list[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'[' star_named_expressions? ']'")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -8918,14 +10900,18 @@ list_rule(Parser *p) _res = _Py_List ( a , Load , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s list[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'[' star_named_expressions? ']'")); } _res = NULL; done: + D(p->level--); return _res; } @@ -8933,13 +10919,16 @@ list_rule(Parser *p) static expr_ty listcomp_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -8948,8 +10937,10 @@ listcomp_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // '[' named_expression for_if_clauses ']' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> listcomp[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'[' named_expression for_if_clauses ']'")); Token * _literal; Token * _literal_1; expr_ty a; @@ -8964,8 +10955,10 @@ listcomp_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { + D(fprintf(stderr, "%*c+ listcomp[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'[' named_expression for_if_clauses ']'")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = 
_token->end_lineno; @@ -8975,28 +10968,37 @@ listcomp_rule(Parser *p) _res = _Py_ListComp ( a , b , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s listcomp[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'[' named_expression for_if_clauses ']'")); } { // invalid_comprehension if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> listcomp[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_comprehension")); void *invalid_comprehension_var; if ( (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension ) { + D(fprintf(stderr, "%*c+ listcomp[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_comprehension")); _res = invalid_comprehension_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s listcomp[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_comprehension")); } _res = NULL; done: + D(p->level--); return _res; } @@ -9004,13 +11006,16 @@ listcomp_rule(Parser *p) static expr_ty tuple_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -9019,8 +11024,10 @@ tuple_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // '(' [star_named_expression ',' star_named_expressions?] ')' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> tuple[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' [star_named_expression ',' star_named_expressions?] 
')'")); Token * _literal; Token * _literal_1; void *a; @@ -9032,8 +11039,10 @@ tuple_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { + D(fprintf(stderr, "%*c+ tuple[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' [star_named_expression ',' star_named_expressions?] ')'")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -9043,14 +11052,18 @@ tuple_rule(Parser *p) _res = _Py_Tuple ( a , Load , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s tuple[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' [star_named_expression ',' star_named_expressions?] ')'")); } _res = NULL; done: + D(p->level--); return _res; } @@ -9058,15 +11071,19 @@ tuple_rule(Parser *p) static expr_ty group_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; { // '(' (yield_expr | named_expression) ')' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> group[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' (yield_expr | named_expression) ')'")); Token * _literal; Token * _literal_1; void *a; @@ -9078,17 +11095,22 @@ group_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { + D(fprintf(stderr, "%*c+ group[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' (yield_expr | named_expression) ')'")); _res = a; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s group[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'(' (yield_expr | named_expression) ')'")); } _res = NULL; done: + D(p->level--); return _res; } @@ -9096,13 +11118,16 @@ group_rule(Parser *p) static expr_ty genexp_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -9111,8 +11136,10 @@ genexp_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // '(' expression for_if_clauses ')' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> genexp[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' expression for_if_clauses ')'")); Token * _literal; Token * _literal_1; expr_ty a; @@ -9127,8 +11154,10 @@ genexp_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { + D(fprintf(stderr, "%*c+ genexp[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' expression for_if_clauses ')'")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -9138,28 +11167,37 @@ genexp_rule(Parser *p) _res = _Py_GeneratorExp ( a , b , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s genexp[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'(' expression for_if_clauses ')'")); } { // invalid_comprehension if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> genexp[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_comprehension")); void *invalid_comprehension_var; if ( (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension ) { + D(fprintf(stderr, "%*c+ genexp[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_comprehension")); _res = invalid_comprehension_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s genexp[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_comprehension")); } _res = NULL; done: + D(p->level--); return _res; } @@ -9167,13 +11205,16 @@ genexp_rule(Parser *p) static expr_ty set_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -9182,8 +11223,10 @@ set_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // '{' expressions_list '}' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> set[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' expressions_list '}'")); Token * _literal; Token * _literal_1; asdl_seq* a; @@ -9195,8 +11238,10 @@ set_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}' ) { + D(fprintf(stderr, "%*c+ set[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' expressions_list '}'")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -9206,14 +11251,18 @@ set_rule(Parser *p) _res = _Py_Set ( a , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + 
D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s set[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' expressions_list '}'")); } _res = NULL; done: + D(p->level--); return _res; } @@ -9221,13 +11270,16 @@ set_rule(Parser *p) static expr_ty setcomp_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -9236,8 +11288,10 @@ setcomp_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // '{' expression for_if_clauses '}' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> setcomp[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' expression for_if_clauses '}'")); Token * _literal; Token * _literal_1; expr_ty a; @@ -9252,8 +11306,10 @@ setcomp_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}' ) { + D(fprintf(stderr, "%*c+ setcomp[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' expression for_if_clauses '}'")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -9263,28 +11319,37 @@ setcomp_rule(Parser *p) _res = _Py_SetComp ( a , b , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s setcomp[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'{' expression for_if_clauses '}'")); } { // invalid_comprehension if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> setcomp[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_comprehension")); void *invalid_comprehension_var; if ( (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension ) { + D(fprintf(stderr, "%*c+ setcomp[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_comprehension")); _res = invalid_comprehension_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s setcomp[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_comprehension")); } _res = NULL; done: + D(p->level--); return _res; } @@ -9292,13 +11357,16 @@ setcomp_rule(Parser *p) static expr_ty dict_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -9307,8 +11375,10 @@ dict_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // '{' double_starred_kvpairs? '}' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> dict[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' double_starred_kvpairs? '}'")); Token * _literal; Token * _literal_1; void *a; @@ -9320,8 +11390,10 @@ dict_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}' ) { + D(fprintf(stderr, "%*c+ dict[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' double_starred_kvpairs? 
'}'")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -9331,14 +11403,18 @@ dict_rule(Parser *p) _res = _Py_Dict ( CHECK ( _PyPegen_get_keys ( p , a ) ) , CHECK ( _PyPegen_get_values ( p , a ) ) , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s dict[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' double_starred_kvpairs? '}'")); } _res = NULL; done: + D(p->level--); return _res; } @@ -9346,13 +11422,16 @@ dict_rule(Parser *p) static expr_ty dictcomp_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -9361,8 +11440,10 @@ dictcomp_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // '{' kvpair for_if_clauses '}' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> dictcomp[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' kvpair for_if_clauses '}'")); Token * _literal; Token * _literal_1; KeyValuePair* a; @@ -9377,8 +11458,10 @@ dictcomp_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}' ) { + D(fprintf(stderr, "%*c+ dictcomp[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' kvpair for_if_clauses '}'")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -9388,28 +11471,37 @@ dictcomp_rule(Parser *p) _res = _Py_DictComp ( a -> key , a -> value , b , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark 
= _mark; + D(fprintf(stderr, "%*c%s dictcomp[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' kvpair for_if_clauses '}'")); } { // invalid_dict_comprehension if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> dictcomp[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_dict_comprehension")); void *invalid_dict_comprehension_var; if ( (invalid_dict_comprehension_var = invalid_dict_comprehension_rule(p)) // invalid_dict_comprehension ) { + D(fprintf(stderr, "%*c+ dictcomp[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_dict_comprehension")); _res = invalid_dict_comprehension_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s dictcomp[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_dict_comprehension")); } _res = NULL; done: + D(p->level--); return _res; } @@ -9417,15 +11509,19 @@ dictcomp_rule(Parser *p) static asdl_seq* double_starred_kvpairs_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq* _res = NULL; int _mark = p->mark; { // ','.double_starred_kvpair+ ','? if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> double_starred_kvpairs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.double_starred_kvpair+ ','?")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; @@ -9435,17 +11531,22 @@ double_starred_kvpairs_rule(Parser *p) (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) { + D(fprintf(stderr, "%*c+ double_starred_kvpairs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.double_starred_kvpair+ ','?")); _res = a; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s double_starred_kvpairs[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','.double_starred_kvpair+ ','?")); } _res = NULL; done: + D(p->level--); return _res; } @@ -9453,15 +11554,19 @@ double_starred_kvpairs_rule(Parser *p) static KeyValuePair* double_starred_kvpair_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } KeyValuePair* _res = NULL; int _mark = p->mark; { // '**' bitwise_or if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> double_starred_kvpair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**' bitwise_or")); Token * _literal; expr_ty a; if ( @@ -9470,31 +11575,41 @@ double_starred_kvpair_rule(Parser *p) (a = bitwise_or_rule(p)) // bitwise_or ) { + D(fprintf(stderr, "%*c+ double_starred_kvpair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**' bitwise_or")); _res = _PyPegen_key_value_pair ( p , NULL , a ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s double_starred_kvpair[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**' bitwise_or")); } { // kvpair if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> double_starred_kvpair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kvpair")); KeyValuePair* kvpair_var; if ( (kvpair_var = kvpair_rule(p)) // kvpair ) { + D(fprintf(stderr, "%*c+ double_starred_kvpair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kvpair")); _res = kvpair_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s double_starred_kvpair[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "kvpair")); } _res = NULL; done: + D(p->level--); return _res; } @@ -9502,15 +11617,19 @@ double_starred_kvpair_rule(Parser *p) static KeyValuePair* kvpair_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } KeyValuePair* _res = NULL; int _mark = p->mark; { // expression ':' expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> kvpair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ':' expression")); Token * _literal; expr_ty a; expr_ty b; @@ -9522,17 +11641,22 @@ kvpair_rule(Parser *p) (b = expression_rule(p)) // expression ) { + D(fprintf(stderr, "%*c+ kvpair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ':' expression")); _res = _PyPegen_key_value_pair ( p , a , b ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s kvpair[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression ':' expression")); } _res = NULL; done: + D(p->level--); return _res; } @@ -9540,27 +11664,35 @@ kvpair_rule(Parser *p) static asdl_seq* for_if_clauses_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq* _res = NULL; int _mark = p->mark; { // for_if_clause+ if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> for_if_clauses[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "for_if_clause+")); asdl_seq * _loop1_104_var; if ( (_loop1_104_var = _loop1_104_rule(p)) // for_if_clause+ ) { + D(fprintf(stderr, "%*c+ for_if_clauses[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "for_if_clause+")); _res = _loop1_104_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s for_if_clauses[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "for_if_clause+")); } _res = NULL; done: + D(p->level--); return _res; } @@ -9570,15 +11702,19 @@ for_if_clauses_rule(Parser *p) static comprehension_ty for_if_clause_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } comprehension_ty _res = NULL; int _mark = p->mark; { // ASYNC 'for' star_targets 'in' disjunction (('if' disjunction))* if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> for_if_clause[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC 'for' star_targets 'in' disjunction (('if' disjunction))*")); Token * _keyword; Token * _keyword_1; expr_ty a; @@ -9599,19 +11735,25 @@ for_if_clause_rule(Parser *p) (c = _loop0_105_rule(p)) // (('if' disjunction))* ) { + D(fprintf(stderr, "%*c+ for_if_clause[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'for' star_targets 'in' disjunction (('if' disjunction))*")); _res = CHECK_VERSION ( 6 , "Async comprehensions are" , _Py_comprehension ( a , b , c , 1 , p -> arena ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s for_if_clause[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "ASYNC 'for' star_targets 'in' disjunction (('if' disjunction))*")); } { // 'for' star_targets 'in' disjunction (('if' disjunction))* if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> for_if_clause[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'for' star_targets 'in' disjunction (('if' disjunction))*")); Token * _keyword; Token * _keyword_1; expr_ty a; @@ -9629,17 +11771,22 @@ for_if_clause_rule(Parser *p) (c = _loop0_106_rule(p)) // (('if' disjunction))* ) { + D(fprintf(stderr, "%*c+ for_if_clause[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for' star_targets 'in' disjunction (('if' disjunction))*")); _res = _Py_comprehension ( a , b , c , 0 , p -> arena ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s for_if_clause[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'for' star_targets 'in' disjunction (('if' disjunction))*")); } _res = NULL; done: + D(p->level--); return _res; } @@ -9647,13 +11794,16 @@ for_if_clause_rule(Parser *p) static expr_ty yield_expr_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -9662,8 +11812,10 @@ yield_expr_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'yield' 'from' expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> yield_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'yield' 'from' expression")); Token * _keyword; Token * _keyword_1; expr_ty a; @@ -9675,8 +11827,10 @@ yield_expr_rule(Parser *p) (a = expression_rule(p)) // expression ) { + D(fprintf(stderr, "%*c+ yield_expr[%d-%d]: %s 
succeeded!\n", p->level, ' ', _mark, p->mark, "'yield' 'from' expression")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -9686,16 +11840,21 @@ yield_expr_rule(Parser *p) _res = _Py_YieldFrom ( a , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s yield_expr[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'yield' 'from' expression")); } { // 'yield' star_expressions? if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> yield_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'yield' star_expressions?")); Token * _keyword; void *a; if ( @@ -9704,8 +11863,10 @@ yield_expr_rule(Parser *p) (a = star_expressions_rule(p), 1) // star_expressions? ) { + D(fprintf(stderr, "%*c+ yield_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'yield' star_expressions?")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -9715,14 +11876,18 @@ yield_expr_rule(Parser *p) _res = _Py_Yield ( a , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s yield_expr[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'yield' star_expressions?")); } _res = NULL; done: + D(p->level--); return _res; } @@ -9730,17 +11895,23 @@ yield_expr_rule(Parser *p) static expr_ty arguments_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, arguments_type, &_res)) + if (_PyPegen_is_memoized(p, arguments_type, &_res)) { + D(p->level--); return _res; + } int _mark = p->mark; { // args ','? &')' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> arguments[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args ','? &')'")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty a; @@ -9752,32 +11923,42 @@ arguments_rule(Parser *p) _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { + D(fprintf(stderr, "%*c+ arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args ','? &')'")); _res = a; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s arguments[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "args ','? &')'")); } { // incorrect_arguments if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> arguments[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "incorrect_arguments")); void *incorrect_arguments_var; if ( (incorrect_arguments_var = incorrect_arguments_rule(p)) // incorrect_arguments ) { + D(fprintf(stderr, "%*c+ arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "incorrect_arguments")); _res = incorrect_arguments_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s arguments[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "incorrect_arguments")); } _res = NULL; done: _PyPegen_insert_memo(p, _mark, arguments_type, _res); + D(p->level--); return _res; } @@ -9785,13 +11966,16 @@ arguments_rule(Parser *p) static expr_ty args_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -9800,8 +11984,10 @@ args_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // starred_expression [',' args] if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> args[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression [',' args]")); expr_ty a; void *b; if ( @@ -9810,8 +11996,10 @@ args_rule(Parser *p) (b = _tmp_107_rule(p), 1) // [',' args] ) { + D(fprintf(stderr, "%*c+ args[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression [',' args]")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -9821,23 +12009,30 @@ args_rule(Parser *p) _res = _Py_Call ( _PyPegen_dummy_name ( p ) , ( b ) ? CHECK ( _PyPegen_seq_insert_in_front ( p , a , ( ( expr_ty ) b ) -> v . Call . args ) ) : CHECK ( _PyPegen_singleton_seq ( p , a ) ) , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s args[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "starred_expression [',' args]")); } { // kwargs if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> args[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwargs")); asdl_seq* a; if ( (a = kwargs_rule(p)) // kwargs ) { + D(fprintf(stderr, "%*c+ args[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwargs")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -9847,16 +12042,21 @@ args_rule(Parser *p) _res = _Py_Call ( _PyPegen_dummy_name ( p ) , CHECK_NULL_ALLOWED ( _PyPegen_seq_extract_starred_exprs ( p , a ) ) , CHECK_NULL_ALLOWED ( _PyPegen_seq_delete_starred_exprs ( p , a ) ) , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s args[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwargs")); } { // named_expression [',' args] if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> args[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "named_expression [',' args]")); expr_ty a; void *b; if ( @@ -9865,8 +12065,10 @@ args_rule(Parser *p) (b = _tmp_108_rule(p), 1) // [',' args] ) { + D(fprintf(stderr, "%*c+ args[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "named_expression [',' args]")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -9876,14 +12078,18 @@ args_rule(Parser *p) _res = _Py_Call ( _PyPegen_dummy_name ( p ) , ( b ) ? CHECK ( _PyPegen_seq_insert_in_front ( p , a , ( ( expr_ty ) b ) -> v . Call . args ) ) : CHECK ( _PyPegen_singleton_seq ( p , a ) ) , ( b ) ? ( ( expr_ty ) b ) -> v . Call . 
keywords : NULL , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s args[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "named_expression [',' args]")); } _res = NULL; done: + D(p->level--); return _res; } @@ -9894,15 +12100,19 @@ args_rule(Parser *p) static asdl_seq* kwargs_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq* _res = NULL; int _mark = p->mark; { // ','.kwarg_or_starred+ ',' ','.kwarg_or_double_starred+ if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> kwargs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_starred+ ',' ','.kwarg_or_double_starred+")); Token * _literal; asdl_seq * a; asdl_seq * b; @@ -9914,45 +12124,60 @@ kwargs_rule(Parser *p) (b = _gather_111_rule(p)) // ','.kwarg_or_double_starred+ ) { + D(fprintf(stderr, "%*c+ kwargs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_starred+ ',' ','.kwarg_or_double_starred+")); _res = _PyPegen_join_sequences ( p , a , b ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s kwargs[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','.kwarg_or_starred+ ',' ','.kwarg_or_double_starred+")); } { // ','.kwarg_or_starred+ if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> kwargs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_starred+")); asdl_seq * _gather_113_var; if ( (_gather_113_var = _gather_113_rule(p)) // ','.kwarg_or_starred+ ) { + D(fprintf(stderr, "%*c+ kwargs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_starred+")); _res = _gather_113_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s kwargs[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.kwarg_or_starred+")); } { // ','.kwarg_or_double_starred+ if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> kwargs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_double_starred+")); asdl_seq * _gather_115_var; if ( (_gather_115_var = _gather_115_rule(p)) // ','.kwarg_or_double_starred+ ) { + D(fprintf(stderr, "%*c+ kwargs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_double_starred+")); _res = _gather_115_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s kwargs[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','.kwarg_or_double_starred+")); } _res = NULL; done: + D(p->level--); return _res; } @@ -9960,13 +12185,16 @@ kwargs_rule(Parser *p) static expr_ty starred_expression_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -9975,8 +12203,10 @@ starred_expression_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // '*' expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> starred_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' expression")); Token * _literal; expr_ty a; if ( @@ -9985,8 +12215,10 @@ starred_expression_rule(Parser *p) (a = expression_rule(p)) // expression ) { + D(fprintf(stderr, "%*c+ starred_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' expression")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -9996,14 +12228,18 @@ starred_expression_rule(Parser *p) _res = _Py_Starred ( a , Load , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s starred_expression[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'*' expression")); } _res = NULL; done: + D(p->level--); return _res; } @@ -10011,13 +12247,16 @@ starred_expression_rule(Parser *p) static KeywordOrStarred* kwarg_or_starred_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } KeywordOrStarred* _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -10026,8 +12265,10 @@ kwarg_or_starred_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME '=' expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> kwarg_or_starred[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME '=' expression")); Token * _literal; expr_ty a; expr_ty b; @@ -10039,8 +12280,10 @@ kwarg_or_starred_rule(Parser *p) (b = expression_rule(p)) // expression ) { + D(fprintf(stderr, "%*c+ kwarg_or_starred[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME '=' expression")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -10050,46 +12293,61 @@ kwarg_or_starred_rule(Parser *p) _res = _PyPegen_keyword_or_starred ( p , CHECK ( _Py_keyword ( a -> v . Name . id , b , EXTRA ) ) , 1 ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s kwarg_or_starred[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NAME '=' expression")); } { // starred_expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> kwarg_or_starred[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression")); expr_ty a; if ( (a = starred_expression_rule(p)) // starred_expression ) { + D(fprintf(stderr, "%*c+ kwarg_or_starred[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression")); _res = _PyPegen_keyword_or_starred ( p , a , 0 ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s kwarg_or_starred[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "starred_expression")); } { // invalid_kwarg if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> kwarg_or_starred[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_kwarg")); void *invalid_kwarg_var; if ( (invalid_kwarg_var = invalid_kwarg_rule(p)) // invalid_kwarg ) { + D(fprintf(stderr, "%*c+ kwarg_or_starred[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_kwarg")); _res = invalid_kwarg_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s kwarg_or_starred[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "invalid_kwarg")); } _res = NULL; done: + D(p->level--); return _res; } @@ -10097,13 +12355,16 @@ kwarg_or_starred_rule(Parser *p) static KeywordOrStarred* kwarg_or_double_starred_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } KeywordOrStarred* _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -10112,8 +12373,10 @@ kwarg_or_double_starred_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME '=' expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> kwarg_or_double_starred[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME '=' expression")); Token * _literal; expr_ty a; expr_ty b; @@ -10125,8 +12388,10 @@ kwarg_or_double_starred_rule(Parser *p) (b = expression_rule(p)) // expression ) { + D(fprintf(stderr, "%*c+ kwarg_or_double_starred[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME '=' expression")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -10136,16 +12401,21 @@ kwarg_or_double_starred_rule(Parser *p) _res = _PyPegen_keyword_or_starred ( p , CHECK ( _Py_keyword ( a -> v . Name . id , b , EXTRA ) ) , 1 ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s kwarg_or_double_starred[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NAME '=' expression")); } { // '**' expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> kwarg_or_double_starred[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**' expression")); Token * _literal; expr_ty a; if ( @@ -10154,8 +12424,10 @@ kwarg_or_double_starred_rule(Parser *p) (a = expression_rule(p)) // expression ) { + D(fprintf(stderr, "%*c+ kwarg_or_double_starred[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**' expression")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -10165,28 +12437,37 @@ kwarg_or_double_starred_rule(Parser *p) _res = _PyPegen_keyword_or_starred ( p , CHECK ( _Py_keyword ( NULL , a , EXTRA ) ) , 1 ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s kwarg_or_double_starred[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**' expression")); } { // invalid_kwarg if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> kwarg_or_double_starred[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_kwarg")); void *invalid_kwarg_var; if ( (invalid_kwarg_var = invalid_kwarg_rule(p)) // invalid_kwarg ) { + D(fprintf(stderr, "%*c+ kwarg_or_double_starred[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_kwarg")); _res = invalid_kwarg_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s kwarg_or_double_starred[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "invalid_kwarg")); } _res = NULL; done: + D(p->level--); return _res; } @@ -10194,13 +12475,16 @@ kwarg_or_double_starred_rule(Parser *p) static expr_ty star_targets_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -10209,8 +12493,10 @@ star_targets_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // star_target !',' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> star_targets[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_target !','")); expr_ty a; if ( (a = star_target_rule(p)) // star_target @@ -10218,19 +12504,25 @@ star_targets_rule(Parser *p) _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 12) // token=',' ) { + D(fprintf(stderr, "%*c+ star_targets[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_target !','")); _res = a; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s star_targets[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_target !','")); } { // star_target ((',' star_target))* ','? if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> star_targets[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_target ((',' star_target))* ','?")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty a; @@ -10243,8 +12535,10 @@ star_targets_rule(Parser *p) (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) { + D(fprintf(stderr, "%*c+ star_targets[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_target ((',' star_target))* ','?")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -10254,14 +12548,18 @@ star_targets_rule(Parser *p) _res = _Py_Tuple ( CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , Store , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s star_targets[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_target ((',' star_target))* ','?")); } _res = NULL; done: + D(p->level--); return _res; } @@ -10269,15 +12567,19 @@ star_targets_rule(Parser *p) static asdl_seq* star_targets_seq_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq* _res = NULL; int _mark = p->mark; { // ','.star_target+ ','? if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> star_targets_seq[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.star_target+ ','?")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; @@ -10287,17 +12589,22 @@ star_targets_seq_rule(Parser *p) (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) { + D(fprintf(stderr, "%*c+ star_targets_seq[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.star_target+ ','?")); _res = a; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s star_targets_seq[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','.star_target+ ','?")); } _res = NULL; done: + D(p->level--); return _res; } @@ -10309,15 +12616,20 @@ star_targets_seq_rule(Parser *p) static expr_ty star_target_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, star_target_type, &_res)) + if (_PyPegen_is_memoized(p, star_target_type, &_res)) { + D(p->level--); return _res; + } int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -10326,8 +12638,10 @@ star_target_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // '*' (!'*' star_target) if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> star_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (!'*' star_target)")); Token * _literal; void *a; if ( @@ -10336,8 +12650,10 @@ star_target_rule(Parser *p) (a = _tmp_120_rule(p)) // !'*' star_target ) { + D(fprintf(stderr, "%*c+ star_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (!'*' star_target)")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -10347,16 +12663,21 @@ star_target_rule(Parser *p) _res = _Py_Starred ( CHECK ( _PyPegen_set_expr_context ( p , a , Store ) ) , Store , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s star_target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' (!'*' star_target)")); } { // t_primary '.' NAME !t_lookahead if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> star_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '.' 
NAME !t_lookahead")); Token * _literal; expr_ty a; expr_ty b; @@ -10370,8 +12691,10 @@ star_target_rule(Parser *p) _PyPegen_lookahead(0, t_lookahead_rule, p) ) { + D(fprintf(stderr, "%*c+ star_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME !t_lookahead")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -10381,16 +12704,21 @@ star_target_rule(Parser *p) _res = _Py_Attribute ( a , b -> v . Name . id , Store , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s star_target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '.' NAME !t_lookahead")); } { // t_primary '[' slices ']' !t_lookahead if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> star_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' !t_lookahead")); Token * _literal; Token * _literal_1; expr_ty a; @@ -10407,8 +12735,10 @@ star_target_rule(Parser *p) _PyPegen_lookahead(0, t_lookahead_rule, p) ) { + D(fprintf(stderr, "%*c+ star_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' !t_lookahead")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -10418,29 +12748,38 @@ star_target_rule(Parser *p) _res = _Py_Subscript ( a , b , Store , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s star_target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "t_primary '[' slices ']' !t_lookahead")); } { // star_atom if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> star_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_atom")); expr_ty star_atom_var; if ( (star_atom_var = star_atom_rule(p)) // star_atom ) { + D(fprintf(stderr, "%*c+ star_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_atom")); _res = star_atom_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s star_target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_atom")); } _res = NULL; done: _PyPegen_insert_memo(p, _mark, star_target_type, _res); + D(p->level--); return _res; } @@ -10452,13 +12791,16 @@ star_target_rule(Parser *p) static expr_ty star_atom_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -10467,26 +12809,34 @@ star_atom_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> star_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME")); expr_ty a; if ( (a = _PyPegen_name_token(p)) // NAME ) { + D(fprintf(stderr, "%*c+ star_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME")); _res = _PyPegen_set_expr_context ( p , a , Store ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s star_atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NAME")); } { // '(' star_target ')' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> star_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' star_target ')'")); Token * _literal; Token * _literal_1; expr_ty a; @@ -10498,19 +12848,25 @@ star_atom_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { + D(fprintf(stderr, "%*c+ star_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' star_target ')'")); _res = _PyPegen_set_expr_context ( p , a , Store ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s star_atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' star_target ')'")); } { // '(' star_targets_seq? ')' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> star_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' star_targets_seq? ')'")); Token * _literal; Token * _literal_1; void *a; @@ -10522,8 +12878,10 @@ star_atom_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { + D(fprintf(stderr, "%*c+ star_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' star_targets_seq? ')'")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -10533,16 +12891,21 @@ star_atom_rule(Parser *p) _res = _Py_Tuple ( a , Store , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s star_atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' star_targets_seq? ')'")); } { // '[' star_targets_seq? 
']' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> star_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'[' star_targets_seq? ']'")); Token * _literal; Token * _literal_1; void *a; @@ -10554,8 +12917,10 @@ star_atom_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { + D(fprintf(stderr, "%*c+ star_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'[' star_targets_seq? ']'")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -10565,14 +12930,18 @@ star_atom_rule(Parser *p) _res = _Py_List ( a , Store , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s star_atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'[' star_targets_seq? ']'")); } _res = NULL; done: + D(p->level--); return _res; } @@ -10580,47 +12949,62 @@ star_atom_rule(Parser *p) static expr_ty single_target_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; { // single_subscript_attribute_target if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> single_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "single_subscript_attribute_target")); expr_ty single_subscript_attribute_target_var; if ( (single_subscript_attribute_target_var = single_subscript_attribute_target_rule(p)) // single_subscript_attribute_target ) { + D(fprintf(stderr, "%*c+ single_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "single_subscript_attribute_target")); _res = single_subscript_attribute_target_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s single_target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "single_subscript_attribute_target")); } { // NAME if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> single_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME")); expr_ty a; if ( (a = _PyPegen_name_token(p)) // NAME ) { + D(fprintf(stderr, "%*c+ single_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME")); _res = _PyPegen_set_expr_context ( p , a , Store ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s single_target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME")); } { // '(' single_target ')' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> single_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' single_target ')'")); Token * _literal; Token * _literal_1; expr_ty a; @@ -10632,17 +13016,22 @@ single_target_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { + D(fprintf(stderr, "%*c+ single_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' single_target ')'")); _res = a; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s single_target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'(' single_target ')'")); } _res = NULL; done: + D(p->level--); return _res; } @@ -10652,13 +13041,16 @@ single_target_rule(Parser *p) static expr_ty single_subscript_attribute_target_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -10667,8 +13059,10 @@ single_subscript_attribute_target_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // t_primary '.' NAME !t_lookahead if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> single_subscript_attribute_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME !t_lookahead")); Token * _literal; expr_ty a; expr_ty b; @@ -10682,8 +13076,10 @@ single_subscript_attribute_target_rule(Parser *p) _PyPegen_lookahead(0, t_lookahead_rule, p) ) { + D(fprintf(stderr, "%*c+ single_subscript_attribute_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME !t_lookahead")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -10693,16 +13089,21 @@ single_subscript_attribute_target_rule(Parser *p) _res = _Py_Attribute ( a , b -> v . Name . id , Store , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s single_subscript_attribute_target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '.' 
NAME !t_lookahead")); } { // t_primary '[' slices ']' !t_lookahead if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> single_subscript_attribute_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' !t_lookahead")); Token * _literal; Token * _literal_1; expr_ty a; @@ -10719,8 +13120,10 @@ single_subscript_attribute_target_rule(Parser *p) _PyPegen_lookahead(0, t_lookahead_rule, p) ) { + D(fprintf(stderr, "%*c+ single_subscript_attribute_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' !t_lookahead")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -10730,14 +13133,18 @@ single_subscript_attribute_target_rule(Parser *p) _res = _Py_Subscript ( a , b , Store , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s single_subscript_attribute_target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '[' slices ']' !t_lookahead")); } _res = NULL; done: + D(p->level--); return _res; } @@ -10745,15 +13152,19 @@ single_subscript_attribute_target_rule(Parser *p) static asdl_seq* del_targets_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq* _res = NULL; int _mark = p->mark; { // ','.del_target+ ','? if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> del_targets[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.del_target+ ','?")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; @@ -10763,17 +13174,22 @@ del_targets_rule(Parser *p) (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) { + D(fprintf(stderr, "%*c+ del_targets[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.del_target+ ','?")); _res = a; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s del_targets[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.del_target+ ','?")); } _res = NULL; done: + D(p->level--); return _res; } @@ -10784,15 +13200,20 @@ del_targets_rule(Parser *p) static expr_ty del_target_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, del_target_type, &_res)) + if (_PyPegen_is_memoized(p, del_target_type, &_res)) { + D(p->level--); return _res; + } int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -10801,8 +13222,10 @@ del_target_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // t_primary '.' NAME &del_target_end if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> del_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME &del_target_end")); Token * _literal; expr_ty a; expr_ty b; @@ -10816,8 +13239,10 @@ del_target_rule(Parser *p) _PyPegen_lookahead(1, del_target_end_rule, p) ) { + D(fprintf(stderr, "%*c+ del_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME &del_target_end")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -10827,16 +13252,21 @@ del_target_rule(Parser *p) _res = _Py_Attribute ( a , b -> v . Name . 
id , Del , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s del_target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '.' NAME &del_target_end")); } { // t_primary '[' slices ']' &del_target_end if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> del_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' &del_target_end")); Token * _literal; Token * _literal_1; expr_ty a; @@ -10853,8 +13283,10 @@ del_target_rule(Parser *p) _PyPegen_lookahead(1, del_target_end_rule, p) ) { + D(fprintf(stderr, "%*c+ del_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' &del_target_end")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -10864,29 +13296,38 @@ del_target_rule(Parser *p) _res = _Py_Subscript ( a , b , Del , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s del_target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '[' slices ']' &del_target_end")); } { // del_t_atom if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> del_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "del_t_atom")); expr_ty del_t_atom_var; if ( (del_t_atom_var = del_t_atom_rule(p)) // del_t_atom ) { + D(fprintf(stderr, "%*c+ del_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "del_t_atom")); _res = del_t_atom_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s del_target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "del_t_atom")); } _res = NULL; done: _PyPegen_insert_memo(p, _mark, del_target_type, _res); + D(p->level--); return _res; } @@ -10899,13 +13340,16 @@ del_target_rule(Parser *p) static expr_ty del_t_atom_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -10914,8 +13358,10 @@ del_t_atom_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME &del_target_end if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> del_t_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME &del_target_end")); expr_ty a; if ( (a = _PyPegen_name_token(p)) // NAME @@ -10923,19 +13369,25 @@ del_t_atom_rule(Parser *p) _PyPegen_lookahead(1, del_target_end_rule, p) ) { + D(fprintf(stderr, "%*c+ del_t_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME &del_target_end")); _res = _PyPegen_set_expr_context ( p , a , Del ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s del_t_atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NAME &del_target_end")); } { // '(' del_target ')' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> del_t_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' del_target ')'")); Token * _literal; Token * _literal_1; expr_ty a; @@ -10947,19 +13399,25 @@ del_t_atom_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { + D(fprintf(stderr, "%*c+ del_t_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' del_target ')'")); _res = _PyPegen_set_expr_context ( p , a , Del ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s del_t_atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' del_target ')'")); } { // '(' del_targets? ')' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> del_t_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' del_targets? ')'")); Token * _literal; Token * _literal_1; void *a; @@ -10971,8 +13429,10 @@ del_t_atom_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { + D(fprintf(stderr, "%*c+ del_t_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' del_targets? ')'")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -10982,16 +13442,21 @@ del_t_atom_rule(Parser *p) _res = _Py_Tuple ( a , Del , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s del_t_atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' del_targets? ')'")); } { // '[' del_targets? 
']' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> del_t_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'[' del_targets? ']'")); Token * _literal; Token * _literal_1; void *a; @@ -11003,8 +13468,10 @@ del_t_atom_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { + D(fprintf(stderr, "%*c+ del_t_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'[' del_targets? ']'")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -11014,28 +13481,37 @@ del_t_atom_rule(Parser *p) _res = _Py_List ( a , Del , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s del_t_atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'[' del_targets? ']'")); } { // invalid_del_target if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> del_t_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_del_target")); void *invalid_del_target_var; if ( (invalid_del_target_var = invalid_del_target_rule(p)) // invalid_del_target ) { + D(fprintf(stderr, "%*c+ del_t_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_del_target")); _res = invalid_del_target_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s del_t_atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "invalid_del_target")); } _res = NULL; done: + D(p->level--); return _res; } @@ -11043,83 +13519,111 @@ del_t_atom_rule(Parser *p) static void * del_target_end_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // ')' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> del_target_end[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { + D(fprintf(stderr, "%*c+ del_target_end[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); _res = _literal; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s del_target_end[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'")); } { // ']' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> del_target_end[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "']'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 10)) // token=']' ) { + D(fprintf(stderr, "%*c+ del_target_end[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "']'")); _res = _literal; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s del_target_end[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "']'")); } { // ',' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> del_target_end[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { + D(fprintf(stderr, "%*c+ del_target_end[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s del_target_end[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','")); } { // ';' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> del_target_end[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "';'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 13)) // token=';' ) { + D(fprintf(stderr, "%*c+ del_target_end[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "';'")); _res = _literal; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s del_target_end[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "';'")); } { // NEWLINE if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> del_target_end[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE")); Token * newline_var; if ( (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { + D(fprintf(stderr, "%*c+ del_target_end[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE")); _res = newline_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s del_target_end[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NEWLINE")); } _res = NULL; done: + D(p->level--); return _res; } @@ -11127,15 +13631,19 @@ del_target_end_rule(Parser *p) static asdl_seq* targets_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq* _res = NULL; int _mark = p->mark; { // ','.target+ ','? if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> targets[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.target+ ','?")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; @@ -11145,17 +13653,22 @@ targets_rule(Parser *p) (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) { + D(fprintf(stderr, "%*c+ targets[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.target+ ','?")); _res = a; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s targets[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.target+ ','?")); } _res = NULL; done: + D(p->level--); return _res; } @@ -11166,15 +13679,20 @@ targets_rule(Parser *p) static expr_ty target_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, target_type, &_res)) + if (_PyPegen_is_memoized(p, target_type, &_res)) { + D(p->level--); return _res; + } int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -11183,8 +13701,10 @@ target_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // t_primary '.' NAME !t_lookahead if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME !t_lookahead")); Token * _literal; expr_ty a; expr_ty b; @@ -11198,8 +13718,10 @@ target_rule(Parser *p) _PyPegen_lookahead(0, t_lookahead_rule, p) ) { + D(fprintf(stderr, "%*c+ target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME !t_lookahead")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -11209,16 +13731,21 @@ target_rule(Parser *p) _res = _Py_Attribute ( a , b -> v . Name . 
id , Store , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '.' NAME !t_lookahead")); } { // t_primary '[' slices ']' !t_lookahead if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' !t_lookahead")); Token * _literal; Token * _literal_1; expr_ty a; @@ -11235,8 +13762,10 @@ target_rule(Parser *p) _PyPegen_lookahead(0, t_lookahead_rule, p) ) { + D(fprintf(stderr, "%*c+ target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' !t_lookahead")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -11246,29 +13775,38 @@ target_rule(Parser *p) _res = _Py_Subscript ( a , b , Store , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '[' slices ']' !t_lookahead")); } { // t_atom if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_atom")); expr_ty t_atom_var; if ( (t_atom_var = t_atom_rule(p)) // t_atom ) { + D(fprintf(stderr, "%*c+ target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_atom")); _res = t_atom_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "t_atom")); } _res = NULL; done: _PyPegen_insert_memo(p, _mark, target_type, _res); + D(p->level--); return _res; } @@ -11283,14 +13821,18 @@ static expr_ty t_primary_raw(Parser *); static expr_ty t_primary_rule(Parser *p) { + D(p->level++); expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, t_primary_type, &_res)) + if (_PyPegen_is_memoized(p, t_primary_type, &_res)) { + D(p->level--); return _res; + } int _mark = p->mark; int _resmark = p->mark; while (1) { int tmpvar_8 = _PyPegen_update_memo(p, _mark, t_primary_type, _res); if (tmpvar_8) { + D(p->level--); return _res; } p->mark = _mark; @@ -11301,18 +13843,22 @@ t_primary_rule(Parser *p) _res = _raw; } p->mark = _resmark; + D(p->level--); return _res; } static expr_ty t_primary_raw(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -11321,8 +13867,10 @@ t_primary_raw(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // t_primary '.' NAME &t_lookahead if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> t_primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME &t_lookahead")); Token * _literal; expr_ty a; expr_ty b; @@ -11336,8 +13884,10 @@ t_primary_raw(Parser *p) _PyPegen_lookahead(1, t_lookahead_rule, p) ) { + D(fprintf(stderr, "%*c+ t_primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME &t_lookahead")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -11347,16 +13897,21 @@ t_primary_raw(Parser *p) _res = _Py_Attribute ( a , b -> v . Name . 
id , Load , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s t_primary[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '.' NAME &t_lookahead")); } { // t_primary '[' slices ']' &t_lookahead if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> t_primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' &t_lookahead")); Token * _literal; Token * _literal_1; expr_ty a; @@ -11373,8 +13928,10 @@ t_primary_raw(Parser *p) _PyPegen_lookahead(1, t_lookahead_rule, p) ) { + D(fprintf(stderr, "%*c+ t_primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' &t_lookahead")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -11384,16 +13941,21 @@ t_primary_raw(Parser *p) _res = _Py_Subscript ( a , b , Load , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s t_primary[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "t_primary '[' slices ']' &t_lookahead")); } { // t_primary genexp &t_lookahead if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> t_primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary genexp &t_lookahead")); expr_ty a; expr_ty b; if ( @@ -11404,8 +13966,10 @@ t_primary_raw(Parser *p) _PyPegen_lookahead(1, t_lookahead_rule, p) ) { + D(fprintf(stderr, "%*c+ t_primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary genexp &t_lookahead")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -11415,16 +13979,21 @@ t_primary_raw(Parser *p) _res = _Py_Call ( a , CHECK ( _PyPegen_singleton_seq ( p , b ) ) , NULL , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s t_primary[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary genexp &t_lookahead")); } { // t_primary '(' arguments? ')' &t_lookahead if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> t_primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '(' arguments? ')' &t_lookahead")); Token * _literal; Token * _literal_1; expr_ty a; @@ -11441,8 +14010,10 @@ t_primary_raw(Parser *p) _PyPegen_lookahead(1, t_lookahead_rule, p) ) { + D(fprintf(stderr, "%*c+ t_primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '(' arguments? ')' &t_lookahead")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -11452,16 +14023,21 @@ t_primary_raw(Parser *p) _res = _Py_Call ( a , ( b ) ? ( ( expr_ty ) b ) -> v . Call . args : NULL , ( b ) ? ( ( expr_ty ) b ) -> v . Call . 
keywords : NULL , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s t_primary[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '(' arguments? ')' &t_lookahead")); } { // atom &t_lookahead if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> t_primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "atom &t_lookahead")); expr_ty a; if ( (a = atom_rule(p)) // atom @@ -11469,17 +14045,22 @@ t_primary_raw(Parser *p) _PyPegen_lookahead(1, t_lookahead_rule, p) ) { + D(fprintf(stderr, "%*c+ t_primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "atom &t_lookahead")); _res = a; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s t_primary[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "atom &t_lookahead")); } _res = NULL; done: + D(p->level--); return _res; } @@ -11487,55 +14068,73 @@ t_primary_raw(Parser *p) static void * t_lookahead_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // '(' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> t_lookahead[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 7)) // token='(' ) { + D(fprintf(stderr, "%*c+ t_lookahead[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('")); _res = _literal; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s t_lookahead[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'('")); } { // '[' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> t_lookahead[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 9)) // token='[' ) { + D(fprintf(stderr, "%*c+ t_lookahead[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); _res = _literal; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s t_lookahead[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'['")); } { // '.' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> t_lookahead[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 23)) // token='.' ) { + D(fprintf(stderr, "%*c+ t_lookahead[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); _res = _literal; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s t_lookahead[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'.'")); } _res = NULL; done: + D(p->level--); return _res; } @@ -11543,13 +14142,16 @@ t_lookahead_rule(Parser *p) static expr_ty t_atom_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } expr_ty _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; + D(p->level--); return NULL; } int _start_lineno = p->tokens[_mark]->lineno; @@ -11558,26 +14160,34 @@ t_atom_rule(Parser *p) UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> t_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME")); expr_ty a; if ( (a = _PyPegen_name_token(p)) // NAME ) { + D(fprintf(stderr, "%*c+ t_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME")); _res = _PyPegen_set_expr_context ( p , a , Store ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s t_atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME")); } { // '(' target ')' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> t_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' target ')'")); Token * _literal; Token * _literal_1; expr_ty a; @@ -11589,19 +14199,25 @@ t_atom_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { + D(fprintf(stderr, "%*c+ t_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' target ')'")); _res = _PyPegen_set_expr_context ( p , a , Store ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s t_atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' target ')'")); } { // '(' targets? 
')' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> t_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' targets? ')'")); Token * _literal; Token * _literal_1; void *b; @@ -11613,8 +14229,10 @@ t_atom_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { + D(fprintf(stderr, "%*c+ t_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' targets? ')'")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -11624,16 +14242,21 @@ t_atom_rule(Parser *p) _res = _Py_Tuple ( b , Store , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s t_atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' targets? ')'")); } { // '[' targets? ']' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> t_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'[' targets? ']'")); Token * _literal; Token * _literal_1; void *b; @@ -11645,8 +14268,10 @@ t_atom_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { + D(fprintf(stderr, "%*c+ t_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'[' targets? ']'")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { + D(p->level--); return NULL; } int _end_lineno = _token->end_lineno; @@ -11656,14 +14281,18 @@ t_atom_rule(Parser *p) _res = _Py_List ( b , Store , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s t_atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'[' targets? 
']'")); } _res = NULL; done: + D(p->level--); return _res; } @@ -11676,15 +14305,19 @@ t_atom_rule(Parser *p) static void * incorrect_arguments_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // args ',' '*' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> incorrect_arguments[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args ',' '*'")); Token * _literal; Token * _literal_1; expr_ty args_var; @@ -11696,19 +14329,25 @@ incorrect_arguments_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 16)) // token='*' ) { + D(fprintf(stderr, "%*c+ incorrect_arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args ',' '*'")); _res = RAISE_SYNTAX_ERROR ( "iterable argument unpacking follows keyword argument unpacking" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s incorrect_arguments[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "args ',' '*'")); } { // expression for_if_clauses ',' [args | expression for_if_clauses] if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> incorrect_arguments[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses ',' [args | expression for_if_clauses]")); Token * _literal; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings @@ -11724,19 +14363,25 @@ incorrect_arguments_rule(Parser *p) (_opt_var = _tmp_125_rule(p), 1) // [args | expression for_if_clauses] ) { + D(fprintf(stderr, "%*c+ incorrect_arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses ',' [args | expression for_if_clauses]")); _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "Generator expression must be parenthesized" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s incorrect_arguments[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression for_if_clauses ',' [args | expression for_if_clauses]")); } { // args for_if_clauses if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> incorrect_arguments[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args for_if_clauses")); expr_ty a; asdl_seq* for_if_clauses_var; if ( @@ -11745,19 +14390,25 @@ incorrect_arguments_rule(Parser *p) (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses ) { + D(fprintf(stderr, "%*c+ incorrect_arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args for_if_clauses")); _res = _PyPegen_nonparen_genexp_in_call ( p , a ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s incorrect_arguments[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "args for_if_clauses")); } { // args ',' expression for_if_clauses if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> incorrect_arguments[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args ',' expression for_if_clauses")); Token * _literal; expr_ty a; expr_ty args_var; @@ -11772,19 +14423,25 @@ incorrect_arguments_rule(Parser *p) (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses ) { + D(fprintf(stderr, "%*c+ incorrect_arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args ',' expression for_if_clauses")); _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "Generator expression must be parenthesized" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s incorrect_arguments[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "args ',' expression for_if_clauses")); } { // args ',' args if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> incorrect_arguments[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args ',' args")); Token * _literal; expr_ty a; expr_ty args_var; @@ -11796,17 +14453,22 @@ incorrect_arguments_rule(Parser *p) (args_var = args_rule(p)) // args ) { + D(fprintf(stderr, "%*c+ incorrect_arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args ',' args")); _res = _PyPegen_arguments_parsing_error ( p , a ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s incorrect_arguments[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "args ',' args")); } _res = NULL; done: + D(p->level--); return _res; } @@ -11814,15 +14476,19 @@ incorrect_arguments_rule(Parser *p) static void * invalid_kwarg_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // expression '=' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> invalid_kwarg[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression '='")); Token * _literal; expr_ty a; if ( @@ -11831,17 +14497,22 @@ invalid_kwarg_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { + D(fprintf(stderr, "%*c+ invalid_kwarg[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression '='")); _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "expression cannot contain assignment, perhaps you meant \"==\"?" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_kwarg[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression '='")); } _res = NULL; done: + D(p->level--); return _res; } @@ -11849,15 +14520,19 @@ invalid_kwarg_rule(Parser *p) static void * invalid_named_expression_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // expression ':=' expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> invalid_named_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ':=' expression")); Token * _literal; expr_ty a; expr_ty expression_var; @@ -11869,17 +14544,22 @@ invalid_named_expression_rule(Parser *p) (expression_var = expression_rule(p)) // expression ) { + D(fprintf(stderr, "%*c+ invalid_named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ':=' expression")); _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "cannot use assignment expressions with %s" , _PyPegen_get_expr_name ( a ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_named_expression[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression ':=' expression")); } _res = NULL; done: + D(p->level--); return _res; } @@ -11893,15 +14573,19 @@ invalid_named_expression_rule(Parser *p) static void * invalid_assignment_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // list ':' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list ':'")); Token * _literal; expr_ty a; if ( @@ -11910,19 +14594,25 @@ invalid_assignment_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { + D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list ':'")); _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "only single target (not list) can be annotated" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_assignment[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "list ':'")); } { // tuple ':' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple ':'")); Token * _literal; expr_ty a; if ( @@ -11931,19 +14621,25 @@ invalid_assignment_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { + D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple ':'")); _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "only single target (not tuple) can be annotated" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_assignment[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "tuple ':'")); } { // star_named_expression ',' star_named_expressions* ':' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions* ':'")); Token * _literal; Token * _literal_1; asdl_seq * _loop0_126_var; @@ -11958,19 +14654,25 @@ invalid_assignment_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' ) { + D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions* ':'")); _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "only single target (not tuple) can be annotated" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_assignment[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_named_expression ',' star_named_expressions* ':'")); } { // expression ':' expression ['=' annotated_rhs] if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ':' expression ['=' annotated_rhs]")); Token * _literal; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings @@ -11986,19 +14688,25 @@ invalid_assignment_rule(Parser *p) (_opt_var = _tmp_127_rule(p), 1) // ['=' annotated_rhs] ) { + D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ':' expression ['=' annotated_rhs]")); _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "illegal target for annotation" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_assignment[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator 
? "ERROR!" : "-", _mark, p->mark, "expression ':' expression ['=' annotated_rhs]")); } { // star_expressions '=' (yield_expr | star_expressions) if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions '=' (yield_expr | star_expressions)")); Token * _literal; void *_tmp_128_var; expr_ty a; @@ -12010,19 +14718,25 @@ invalid_assignment_rule(Parser *p) (_tmp_128_var = _tmp_128_rule(p)) // yield_expr | star_expressions ) { + D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions '=' (yield_expr | star_expressions)")); _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( _PyPegen_get_invalid_target ( a ) , "cannot assign to %s" , _PyPegen_get_expr_name ( _PyPegen_get_invalid_target ( a ) ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_assignment[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_expressions '=' (yield_expr | star_expressions)")); } { // star_expressions augassign (yield_expr | star_expressions) if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions augassign (yield_expr | star_expressions)")); void *_tmp_129_var; expr_ty a; AugOperator* augassign_var; @@ -12034,17 +14748,22 @@ invalid_assignment_rule(Parser *p) (_tmp_129_var = _tmp_129_rule(p)) // yield_expr | star_expressions ) { + D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions augassign (yield_expr | star_expressions)")); _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "'%s' is an illegal expression for augmented assignment" , _PyPegen_get_expr_name ( a ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_assignment[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_expressions augassign (yield_expr | star_expressions)")); } _res = NULL; done: + D(p->level--); return _res; } @@ -12052,15 +14771,19 @@ invalid_assignment_rule(Parser *p) static void * invalid_block_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // NEWLINE !INDENT if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> invalid_block[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE !INDENT")); Token * newline_var; if ( (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' @@ -12068,17 +14791,22 @@ invalid_block_rule(Parser *p) _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, INDENT) // token=INDENT ) { + D(fprintf(stderr, "%*c+ invalid_block[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE !INDENT")); _res = RAISE_INDENTATION_ERROR ( "expected an indented block" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_block[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NEWLINE !INDENT")); } _res = NULL; done: + D(p->level--); return _res; } @@ -12086,15 +14814,19 @@ invalid_block_rule(Parser *p) static void * invalid_comprehension_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // ('[' | '(' | '{') starred_expression for_if_clauses if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> invalid_comprehension[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('[' | '(' | '{') starred_expression for_if_clauses")); void *_tmp_130_var; expr_ty a; asdl_seq* for_if_clauses_var; @@ -12106,17 +14838,22 @@ invalid_comprehension_rule(Parser *p) (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses ) { + D(fprintf(stderr, "%*c+ invalid_comprehension[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "('[' | '(' | '{') starred_expression for_if_clauses")); _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "iterable unpacking cannot be used in comprehension" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_comprehension[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "('[' | '(' | '{') starred_expression for_if_clauses")); } _res = NULL; done: + D(p->level--); return _res; } @@ -12124,15 +14861,19 @@ invalid_comprehension_rule(Parser *p) static void * invalid_dict_comprehension_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // '{' '**' bitwise_or for_if_clauses '}' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> invalid_dict_comprehension[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' '**' bitwise_or for_if_clauses '}'")); Token * _literal; Token * _literal_1; Token * a; @@ -12150,17 +14891,22 @@ invalid_dict_comprehension_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}' ) { + D(fprintf(stderr, "%*c+ invalid_dict_comprehension[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' '**' bitwise_or for_if_clauses '}'")); _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "dict unpacking cannot be used in dict comprehension" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_dict_comprehension[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'{' '**' bitwise_or for_if_clauses '}'")); } _res = NULL; done: + D(p->level--); return _res; } @@ -12169,15 +14915,19 @@ invalid_dict_comprehension_rule(Parser *p) static void * invalid_parameters_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // param_no_default* (slash_with_default | param_with_default+) param_no_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default* (slash_with_default | param_with_default+) param_no_default")); asdl_seq * _loop0_131_var; void *_tmp_132_var; arg_ty param_no_default_var; @@ -12189,17 +14939,22 @@ invalid_parameters_rule(Parser *p) (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { + D(fprintf(stderr, "%*c+ invalid_parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default* (slash_with_default | param_with_default+) param_no_default")); _res = RAISE_SYNTAX_ERROR ( "non-default argument follows default argument" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_parameters[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default* (slash_with_default | param_with_default+) param_no_default")); } _res = NULL; done: + D(p->level--); return _res; } @@ -12207,15 +14962,19 @@ invalid_parameters_rule(Parser *p) static void * invalid_star_etc_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // '*' (')' | ',' (')' | '**')) if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> invalid_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (')' | ',' (')' | '**'))")); Token * _literal; void *_tmp_133_var; if ( @@ -12224,19 +14983,25 @@ invalid_star_etc_rule(Parser *p) (_tmp_133_var = _tmp_133_rule(p)) // ')' | ',' (')' | '**') ) { + D(fprintf(stderr, "%*c+ invalid_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (')' | ',' (')' | '**'))")); _res = RAISE_SYNTAX_ERROR ( "named arguments must follow bare *" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_star_etc[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'*' (')' | ',' (')' | '**'))")); } { // '*' ',' TYPE_COMMENT if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> invalid_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' ',' TYPE_COMMENT")); Token * _literal; Token * _literal_1; Token * type_comment_var; @@ -12248,17 +15013,22 @@ invalid_star_etc_rule(Parser *p) (type_comment_var = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' ) { + D(fprintf(stderr, "%*c+ invalid_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' ',' TYPE_COMMENT")); _res = RAISE_SYNTAX_ERROR ( "bare * has associated type comment" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_star_etc[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' ',' TYPE_COMMENT")); } _res = NULL; done: + D(p->level--); return _res; } @@ -12266,15 +15036,19 @@ invalid_star_etc_rule(Parser *p) static void * invalid_lambda_star_etc_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // '*' (':' | ',' (':' | '**')) if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> invalid_lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (':' | ',' (':' | '**'))")); Token * _literal; void *_tmp_134_var; if ( @@ -12283,17 +15057,22 @@ invalid_lambda_star_etc_rule(Parser *p) (_tmp_134_var = _tmp_134_rule(p)) // ':' | ',' (':' | '**') ) { + D(fprintf(stderr, "%*c+ invalid_lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (':' | ',' (':' | '**'))")); _res = RAISE_SYNTAX_ERROR ( "named arguments must follow bare *" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s 
invalid_lambda_star_etc[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' (':' | ',' (':' | '**'))")); } _res = NULL; done: + D(p->level--); return _res; } @@ -12301,15 +15080,19 @@ invalid_lambda_star_etc_rule(Parser *p) static void * invalid_double_type_comments_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> invalid_double_type_comments[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT")); Token * indent_var; Token * newline_var; Token * newline_var_1; @@ -12327,17 +15110,22 @@ invalid_double_type_comments_rule(Parser *p) (indent_var = _PyPegen_expect_token(p, INDENT)) // token='INDENT' ) { + D(fprintf(stderr, "%*c+ invalid_double_type_comments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT")); _res = RAISE_SYNTAX_ERROR ( "Cannot have two type comments on def" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_double_type_comments[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT")); } _res = NULL; done: + D(p->level--); return _res; } @@ -12345,15 +15133,19 @@ invalid_double_type_comments_rule(Parser *p) static void * invalid_del_target_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // star_expression &del_target_end if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> invalid_del_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expression &del_target_end")); expr_ty a; if ( (a = star_expression_rule(p)) // star_expression @@ -12361,17 +15153,22 @@ invalid_del_target_rule(Parser *p) _PyPegen_lookahead(1, del_target_end_rule, p) ) { + D(fprintf(stderr, "%*c+ invalid_del_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expression &del_target_end")); _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "cannot delete %s" , _PyPegen_get_expr_name ( a ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_del_target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_expression &del_target_end")); } _res = NULL; done: + D(p->level--); return _res; } @@ -12379,15 +15176,19 @@ invalid_del_target_rule(Parser *p) static void * invalid_import_from_targets_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // import_from_as_names ',' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> invalid_import_from_targets[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "import_from_as_names ','")); Token * _literal; asdl_seq* import_from_as_names_var; if ( @@ -12396,17 +15197,22 @@ invalid_import_from_targets_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { + D(fprintf(stderr, "%*c+ invalid_import_from_targets[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "import_from_as_names ','")); _res = RAISE_SYNTAX_ERROR ( "trailing comma not allowed without surrounding parentheses" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_import_from_targets[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "import_from_as_names ','")); } _res = NULL; done: + D(p->level--); return _res; } @@ -12414,7 +15220,9 @@ invalid_import_from_targets_rule(Parser *p) static asdl_seq * _loop0_1_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -12424,14 +15232,17 @@ _loop0_1_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // NEWLINE if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_1[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE")); Token * newline_var; while ( (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' @@ -12444,6 +15255,7 @@ _loop0_1_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -12452,17 +15264,21 @@ _loop0_1_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_1[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NEWLINE")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_1_type, _seq); + D(p->level--); return _seq; } @@ -12470,7 +15286,9 @@ _loop0_1_rule(Parser *p) static asdl_seq * _loop0_2_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -12480,14 +15298,17 @@ _loop0_2_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // NEWLINE if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_2[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE")); Token * newline_var; while ( (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' @@ -12500,6 +15321,7 @@ _loop0_2_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -12508,17 +15330,21 @@ _loop0_2_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_2[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NEWLINE")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_2_type, _seq); + D(p->level--); return _seq; } @@ -12526,7 +15352,9 @@ _loop0_2_rule(Parser *p) static asdl_seq * _loop0_4_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -12536,14 +15364,17 @@ _loop0_4_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_4[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); Token * _literal; expr_ty elem; while ( @@ -12556,6 +15387,7 @@ _loop0_4_rule(Parser *p) if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; PyMem_Free(_children); + D(p->level--); return NULL; } if (_n == _children_capacity) { @@ -12564,6 +15396,7 @@ _loop0_4_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -12572,17 +15405,21 @@ _loop0_4_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_4[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' expression")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_4_type, _seq); + D(p->level--); return _seq; } @@ -12590,15 +15427,19 @@ _loop0_4_rule(Parser *p) static asdl_seq * _gather_3_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq * _res = NULL; int _mark = p->mark; { // expression _loop0_4 if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _gather_3[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_4")); expr_ty elem; asdl_seq * seq; if ( @@ -12607,13 +15448,17 @@ _gather_3_rule(Parser *p) (seq = _loop0_4_rule(p)) // _loop0_4 ) { + D(fprintf(stderr, "%*c+ _gather_3[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_4")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_3[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression _loop0_4")); } _res = NULL; done: + D(p->level--); return _res; } @@ -12621,7 +15466,9 @@ _gather_3_rule(Parser *p) static asdl_seq * _loop0_6_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -12631,14 +15478,17 @@ _loop0_6_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_6[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); Token * _literal; expr_ty elem; while ( @@ -12651,6 +15501,7 @@ _loop0_6_rule(Parser *p) if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; PyMem_Free(_children); + D(p->level--); return NULL; } if (_n == _children_capacity) { @@ -12659,6 +15510,7 @@ _loop0_6_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -12667,17 +15519,21 @@ _loop0_6_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_6[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' expression")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_6_type, _seq); + D(p->level--); return _seq; } @@ -12685,15 +15541,19 @@ _loop0_6_rule(Parser *p) static asdl_seq * _gather_5_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq * _res = NULL; int _mark = p->mark; { // expression _loop0_6 if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _gather_5[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_6")); expr_ty elem; asdl_seq * seq; if ( @@ -12702,13 +15562,17 @@ _gather_5_rule(Parser *p) (seq = _loop0_6_rule(p)) // _loop0_6 ) { + D(fprintf(stderr, "%*c+ _gather_5[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_6")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_5[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression _loop0_6")); } _res = NULL; done: + D(p->level--); return _res; } @@ -12716,7 +15580,9 @@ _gather_5_rule(Parser *p) static asdl_seq * _loop0_8_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -12726,14 +15592,17 @@ _loop0_8_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); Token * _literal; expr_ty elem; while ( @@ -12746,6 +15615,7 @@ _loop0_8_rule(Parser *p) if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; PyMem_Free(_children); + D(p->level--); return NULL; } if (_n == _children_capacity) { @@ -12754,6 +15624,7 @@ _loop0_8_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -12762,17 +15633,21 @@ _loop0_8_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_8[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' expression")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_8_type, _seq); + D(p->level--); return _seq; } @@ -12780,15 +15655,19 @@ _loop0_8_rule(Parser *p) static asdl_seq * _gather_7_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq * _res = NULL; int _mark = p->mark; { // expression _loop0_8 if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _gather_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_8")); expr_ty elem; asdl_seq * seq; if ( @@ -12797,13 +15676,17 @@ _gather_7_rule(Parser *p) (seq = _loop0_8_rule(p)) // _loop0_8 ) { + D(fprintf(stderr, "%*c+ _gather_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_8")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_7[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression _loop0_8")); } _res = NULL; done: + D(p->level--); return _res; } @@ -12811,7 +15694,9 @@ _gather_7_rule(Parser *p) static asdl_seq * _loop0_10_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -12821,14 +15706,17 @@ _loop0_10_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_10[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); Token * _literal; expr_ty elem; while ( @@ -12841,6 +15729,7 @@ _loop0_10_rule(Parser *p) if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; PyMem_Free(_children); + D(p->level--); return NULL; } if (_n == _children_capacity) { @@ -12849,6 +15738,7 @@ _loop0_10_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -12857,17 +15747,21 @@ _loop0_10_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_10[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' expression")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_10_type, _seq); + D(p->level--); return _seq; } @@ -12875,15 +15769,19 @@ _loop0_10_rule(Parser *p) static asdl_seq * _gather_9_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq * _res = NULL; int _mark = p->mark; { // expression _loop0_10 if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _gather_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_10")); expr_ty elem; asdl_seq * seq; if ( @@ -12892,13 +15790,17 @@ _gather_9_rule(Parser *p) (seq = _loop0_10_rule(p)) // _loop0_10 ) { + D(fprintf(stderr, "%*c+ _gather_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_10")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_9[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression _loop0_10")); } _res = NULL; done: + D(p->level--); return _res; } @@ -12906,7 +15808,9 @@ _gather_9_rule(Parser *p) static asdl_seq * _loop1_11_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -12916,14 +15820,17 @@ _loop1_11_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // statement if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop1_11[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "statement")); asdl_seq* statement_var; while ( (statement_var = statement_rule(p)) // statement @@ -12936,6 +15843,7 @@ _loop1_11_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -12944,9 +15852,12 @@ _loop1_11_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_11[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "statement")); } if (_n == 0 || p->error_indicator) { PyMem_Free(_children); + D(p->level--); return NULL; } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -12954,11 +15865,13 @@ _loop1_11_rule(Parser *p) PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop1_11_type, _seq); + D(p->level--); return _seq; } @@ -12966,7 +15879,9 @@ _loop1_11_rule(Parser *p) static asdl_seq * _loop0_13_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -12976,14 +15891,17 @@ _loop0_13_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // ';' small_stmt if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_13[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "';' small_stmt")); Token * _literal; stmt_ty elem; while ( @@ -12996,6 +15914,7 @@ _loop0_13_rule(Parser *p) if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; PyMem_Free(_children); + D(p->level--); return NULL; } if (_n == _children_capacity) { @@ -13004,6 +15923,7 @@ _loop0_13_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -13012,17 +15932,21 @@ _loop0_13_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_13[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "';' small_stmt")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_13_type, _seq); + D(p->level--); return _seq; } @@ -13030,15 +15954,19 @@ _loop0_13_rule(Parser *p) static asdl_seq * _gather_12_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq * _res = NULL; int _mark = p->mark; { // small_stmt _loop0_13 if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _gather_12[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "small_stmt _loop0_13")); stmt_ty elem; asdl_seq * seq; if ( @@ -13047,13 +15975,17 @@ _gather_12_rule(Parser *p) (seq = _loop0_13_rule(p)) // _loop0_13 ) { + D(fprintf(stderr, "%*c+ _gather_12[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "small_stmt _loop0_13")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_12[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "small_stmt _loop0_13")); } _res = NULL; done: + D(p->level--); return _res; } @@ -13061,41 +15993,54 @@ _gather_12_rule(Parser *p) static void * _tmp_14_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // 'import' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_14[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'import'")); Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 513)) // token='import' ) { + D(fprintf(stderr, "%*c+ _tmp_14[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'import'")); _res = _keyword; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_14[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'import'")); } { // 'from' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_14[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from'")); Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 514)) // token='from' ) { + D(fprintf(stderr, "%*c+ _tmp_14[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from'")); _res = _keyword; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_14[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'from'")); } _res = NULL; done: + D(p->level--); return _res; } @@ -13103,55 +16048,73 @@ _tmp_14_rule(Parser *p) static void * _tmp_15_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // 'def' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_15[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'def'")); Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 522)) // token='def' ) { + D(fprintf(stderr, "%*c+ _tmp_15[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'def'")); _res = _keyword; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_15[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'def'")); } { // '@' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_15[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 49)) // token='@' ) { + D(fprintf(stderr, "%*c+ _tmp_15[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@'")); _res = _literal; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_15[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'@'")); } { // ASYNC if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_15[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC")); Token * async_var; if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' ) { + D(fprintf(stderr, "%*c+ _tmp_15[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC")); _res = async_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_15[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "ASYNC")); } _res = NULL; done: + D(p->level--); return _res; } @@ -13159,41 +16122,54 @@ _tmp_15_rule(Parser *p) static void * _tmp_16_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // 'class' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_16[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'class'")); Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 523)) // token='class' ) { + D(fprintf(stderr, "%*c+ _tmp_16[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'class'")); _res = _keyword; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_16[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'class'")); } { // '@' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_16[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 49)) // token='@' ) { + D(fprintf(stderr, "%*c+ _tmp_16[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@'")); _res = _literal; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_16[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'@'")); } _res = NULL; done: + D(p->level--); return _res; } @@ -13201,41 +16177,54 @@ _tmp_16_rule(Parser *p) static void * _tmp_17_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // 'with' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_17[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'with'")); Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 519)) // token='with' ) { + D(fprintf(stderr, "%*c+ _tmp_17[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'with'")); _res = _keyword; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_17[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'with'")); } { // ASYNC if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_17[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC")); Token * async_var; if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' ) { + D(fprintf(stderr, "%*c+ _tmp_17[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC")); _res = async_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_17[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "ASYNC")); } _res = NULL; done: + D(p->level--); return _res; } @@ -13243,41 +16232,54 @@ _tmp_17_rule(Parser *p) static void * _tmp_18_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // 'for' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_18[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'for'")); Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 517)) // token='for' ) { + D(fprintf(stderr, "%*c+ _tmp_18[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for'")); _res = _keyword; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_18[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'for'")); } { // ASYNC if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_18[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC")); Token * async_var; if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' ) { + D(fprintf(stderr, "%*c+ _tmp_18[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC")); _res = async_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_18[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "ASYNC")); } _res = NULL; done: + D(p->level--); return _res; } @@ -13285,15 +16287,19 @@ _tmp_18_rule(Parser *p) static void * _tmp_19_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // '=' annotated_rhs if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_19[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); Token * _literal; expr_ty d; if ( @@ -13302,17 +16308,22 @@ _tmp_19_rule(Parser *p) (d = annotated_rhs_rule(p)) // annotated_rhs ) { + D(fprintf(stderr, "%*c+ _tmp_19[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); _res = d; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_19[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'=' annotated_rhs")); } _res = NULL; done: + D(p->level--); return _res; } @@ -13320,15 +16331,19 @@ _tmp_19_rule(Parser *p) static void * _tmp_20_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // '(' single_target ')' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_20[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' single_target ')'")); Token * _literal; Token * _literal_1; expr_ty b; @@ -13340,31 +16355,41 @@ _tmp_20_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { + D(fprintf(stderr, "%*c+ _tmp_20[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' single_target ')'")); _res = b; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_20[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'(' single_target ')'")); } { // single_subscript_attribute_target if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_20[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "single_subscript_attribute_target")); expr_ty single_subscript_attribute_target_var; if ( (single_subscript_attribute_target_var = single_subscript_attribute_target_rule(p)) // single_subscript_attribute_target ) { + D(fprintf(stderr, "%*c+ _tmp_20[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "single_subscript_attribute_target")); _res = single_subscript_attribute_target_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_20[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "single_subscript_attribute_target")); } _res = NULL; done: + D(p->level--); return _res; } @@ -13372,15 +16397,19 @@ _tmp_20_rule(Parser *p) static void * _tmp_21_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // '=' annotated_rhs if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_21[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); Token * _literal; expr_ty d; if ( @@ -13389,17 +16418,22 @@ _tmp_21_rule(Parser *p) (d = annotated_rhs_rule(p)) // annotated_rhs ) { + D(fprintf(stderr, "%*c+ _tmp_21[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); _res = d; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_21[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'=' annotated_rhs")); } _res = NULL; done: + D(p->level--); return _res; } @@ -13407,7 +16441,9 @@ _tmp_21_rule(Parser *p) static asdl_seq * _loop1_22_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -13417,14 +16453,17 @@ _loop1_22_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // (star_targets '=') if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop1_22[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); void *_tmp_135_var; while ( (_tmp_135_var = _tmp_135_rule(p)) // star_targets '=' @@ -13437,6 +16476,7 @@ _loop1_22_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -13445,9 +16485,12 @@ _loop1_22_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_22[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(star_targets '=')")); } if (_n == 0 || p->error_indicator) { PyMem_Free(_children); + D(p->level--); return NULL; } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -13455,11 +16498,13 @@ _loop1_22_rule(Parser *p) PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop1_22_type, _seq); + D(p->level--); return _seq; } @@ -13467,41 +16512,54 @@ _loop1_22_rule(Parser *p) static void * _tmp_23_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // yield_expr if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_23[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { + D(fprintf(stderr, "%*c+ _tmp_23[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); _res = yield_expr_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_23[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr")); } { // star_expressions if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_23[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); expr_ty star_expressions_var; if ( (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { + D(fprintf(stderr, "%*c+ _tmp_23[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); _res = star_expressions_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_23[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_expressions")); } _res = NULL; done: + D(p->level--); return _res; } @@ -13509,41 +16567,54 @@ _tmp_23_rule(Parser *p) static void * _tmp_24_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // yield_expr if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_24[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { + D(fprintf(stderr, "%*c+ _tmp_24[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); _res = yield_expr_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_24[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr")); } { // star_expressions if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_24[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); expr_ty star_expressions_var; if ( (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { + D(fprintf(stderr, "%*c+ _tmp_24[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); _res = star_expressions_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_24[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_expressions")); } _res = NULL; done: + D(p->level--); return _res; } @@ -13551,7 +16622,9 @@ _tmp_24_rule(Parser *p) static asdl_seq * _loop0_26_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -13561,14 +16634,17 @@ _loop0_26_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' NAME if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_26[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' NAME")); Token * _literal; expr_ty elem; while ( @@ -13581,6 +16657,7 @@ _loop0_26_rule(Parser *p) if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; PyMem_Free(_children); + D(p->level--); return NULL; } if (_n == _children_capacity) { @@ -13589,6 +16666,7 @@ _loop0_26_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -13597,17 +16675,21 @@ _loop0_26_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_26[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' NAME")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_26_type, _seq); + D(p->level--); return _seq; } @@ -13615,15 +16697,19 @@ _loop0_26_rule(Parser *p) static asdl_seq * _gather_25_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq * _res = NULL; int _mark = p->mark; { // NAME _loop0_26 if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _gather_25[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME _loop0_26")); expr_ty elem; asdl_seq * seq; if ( @@ -13632,13 +16718,17 @@ _gather_25_rule(Parser *p) (seq = _loop0_26_rule(p)) // _loop0_26 ) { + D(fprintf(stderr, "%*c+ _gather_25[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME _loop0_26")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_25[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NAME _loop0_26")); } _res = NULL; done: + D(p->level--); return _res; } @@ -13646,7 +16736,9 @@ _gather_25_rule(Parser *p) static asdl_seq * _loop0_28_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -13656,14 +16748,17 @@ _loop0_28_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' NAME if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_28[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' NAME")); Token * _literal; expr_ty elem; while ( @@ -13676,6 +16771,7 @@ _loop0_28_rule(Parser *p) if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; PyMem_Free(_children); + D(p->level--); return NULL; } if (_n == _children_capacity) { @@ -13684,6 +16780,7 @@ _loop0_28_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -13692,17 +16789,21 @@ _loop0_28_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_28[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' NAME")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_28_type, _seq); + D(p->level--); return _seq; } @@ -13710,15 +16811,19 @@ _loop0_28_rule(Parser *p) static asdl_seq * _gather_27_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq * _res = NULL; int _mark = p->mark; { // NAME _loop0_28 if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _gather_27[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME _loop0_28")); expr_ty elem; asdl_seq * seq; if ( @@ -13727,13 +16832,17 @@ _gather_27_rule(Parser *p) (seq = _loop0_28_rule(p)) // _loop0_28 ) { + D(fprintf(stderr, "%*c+ _gather_27[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME _loop0_28")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_27[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NAME _loop0_28")); } _res = NULL; done: + D(p->level--); return _res; } @@ -13741,15 +16850,19 @@ _gather_27_rule(Parser *p) static void * _tmp_29_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // ',' expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_29[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); Token * _literal; expr_ty z; if ( @@ -13758,17 +16871,22 @@ _tmp_29_rule(Parser *p) (z = expression_rule(p)) // expression ) { + D(fprintf(stderr, "%*c+ _tmp_29[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_29[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression")); } _res = NULL; done: + D(p->level--); return _res; } @@ -13776,7 +16894,9 @@ _tmp_29_rule(Parser *p) static asdl_seq * _loop0_30_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -13786,14 +16906,17 @@ _loop0_30_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // ('.' | '...') if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_30[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')")); void *_tmp_136_var; while ( (_tmp_136_var = _tmp_136_rule(p)) // '.' | '...' 
@@ -13806,6 +16929,7 @@ _loop0_30_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -13814,17 +16938,21 @@ _loop0_30_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_30[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('.' | '...')")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_30_type, _seq); + D(p->level--); return _seq; } @@ -13832,7 +16960,9 @@ _loop0_30_rule(Parser *p) static asdl_seq * _loop1_31_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -13842,14 +16972,17 @@ _loop1_31_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // ('.' | '...') if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop1_31[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')")); void *_tmp_137_var; while ( (_tmp_137_var = _tmp_137_rule(p)) // '.' | '...' @@ -13862,6 +16995,7 @@ _loop1_31_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -13870,9 +17004,12 @@ _loop1_31_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_31[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('.' 
| '...')")); } if (_n == 0 || p->error_indicator) { PyMem_Free(_children); + D(p->level--); return NULL; } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -13880,11 +17017,13 @@ _loop1_31_rule(Parser *p) PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop1_31_type, _seq); + D(p->level--); return _seq; } @@ -13892,7 +17031,9 @@ _loop1_31_rule(Parser *p) static asdl_seq * _loop0_33_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -13902,14 +17043,17 @@ _loop0_33_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' import_from_as_name if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_33[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' import_from_as_name")); Token * _literal; alias_ty elem; while ( @@ -13922,6 +17066,7 @@ _loop0_33_rule(Parser *p) if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; PyMem_Free(_children); + D(p->level--); return NULL; } if (_n == _children_capacity) { @@ -13930,6 +17075,7 @@ _loop0_33_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -13938,17 +17084,21 @@ _loop0_33_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_33[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' import_from_as_name")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_33_type, _seq); + D(p->level--); return _seq; } @@ -13956,15 +17106,19 @@ _loop0_33_rule(Parser *p) static asdl_seq * _gather_32_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq * _res = NULL; int _mark = p->mark; { // import_from_as_name _loop0_33 if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _gather_32[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "import_from_as_name _loop0_33")); alias_ty elem; asdl_seq * seq; if ( @@ -13973,13 +17127,17 @@ _gather_32_rule(Parser *p) (seq = _loop0_33_rule(p)) // _loop0_33 ) { + D(fprintf(stderr, "%*c+ _gather_32[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "import_from_as_name _loop0_33")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_32[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "import_from_as_name _loop0_33")); } _res = NULL; done: + D(p->level--); return _res; } @@ -13987,15 +17145,19 @@ _gather_32_rule(Parser *p) static void * _tmp_34_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // 'as' NAME if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_34[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); Token * _keyword; expr_ty z; if ( @@ -14004,17 +17166,22 @@ _tmp_34_rule(Parser *p) (z = _PyPegen_name_token(p)) // NAME ) { + D(fprintf(stderr, "%*c+ _tmp_34[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_34[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME")); } _res = NULL; done: + D(p->level--); return _res; } @@ -14022,7 +17189,9 @@ _tmp_34_rule(Parser *p) static asdl_seq * _loop0_36_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -14032,14 +17201,17 @@ _loop0_36_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' dotted_as_name if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_36[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' dotted_as_name")); Token * _literal; alias_ty elem; while ( @@ -14052,6 +17224,7 @@ _loop0_36_rule(Parser *p) if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; PyMem_Free(_children); + D(p->level--); return NULL; } if (_n == _children_capacity) { @@ -14060,6 +17233,7 @@ _loop0_36_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return 
NULL; } _children = _new_children; @@ -14068,17 +17242,21 @@ _loop0_36_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_36[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' dotted_as_name")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_36_type, _seq); + D(p->level--); return _seq; } @@ -14086,15 +17264,19 @@ _loop0_36_rule(Parser *p) static asdl_seq * _gather_35_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq * _res = NULL; int _mark = p->mark; { // dotted_as_name _loop0_36 if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _gather_35[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dotted_as_name _loop0_36")); alias_ty elem; asdl_seq * seq; if ( @@ -14103,13 +17285,17 @@ _gather_35_rule(Parser *p) (seq = _loop0_36_rule(p)) // _loop0_36 ) { + D(fprintf(stderr, "%*c+ _gather_35[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dotted_as_name _loop0_36")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_35[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "dotted_as_name _loop0_36")); } _res = NULL; done: + D(p->level--); return _res; } @@ -14117,15 +17303,19 @@ _gather_35_rule(Parser *p) static void * _tmp_37_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // 'as' NAME if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_37[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); Token * _keyword; expr_ty z; if ( @@ -14134,17 +17324,22 @@ _tmp_37_rule(Parser *p) (z = _PyPegen_name_token(p)) // NAME ) { + D(fprintf(stderr, "%*c+ _tmp_37[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_37[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME")); } _res = NULL; done: + D(p->level--); return _res; } @@ -14152,7 +17347,9 @@ _tmp_37_rule(Parser *p) static asdl_seq * _loop0_39_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -14162,14 +17359,17 @@ _loop0_39_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' with_item if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_39[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item")); Token * _literal; withitem_ty elem; while ( @@ -14182,6 +17382,7 @@ _loop0_39_rule(Parser *p) if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; PyMem_Free(_children); + D(p->level--); return NULL; } if (_n == _children_capacity) { @@ -14190,6 +17391,7 @@ _loop0_39_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } 
_children = _new_children; @@ -14198,17 +17400,21 @@ _loop0_39_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_39[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' with_item")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_39_type, _seq); + D(p->level--); return _seq; } @@ -14216,15 +17422,19 @@ _loop0_39_rule(Parser *p) static asdl_seq * _gather_38_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq * _res = NULL; int _mark = p->mark; { // with_item _loop0_39 if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _gather_38[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_39")); withitem_ty elem; asdl_seq * seq; if ( @@ -14233,13 +17443,17 @@ _gather_38_rule(Parser *p) (seq = _loop0_39_rule(p)) // _loop0_39 ) { + D(fprintf(stderr, "%*c+ _gather_38[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_39")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_38[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "with_item _loop0_39")); } _res = NULL; done: + D(p->level--); return _res; } @@ -14247,7 +17461,9 @@ _gather_38_rule(Parser *p) static asdl_seq * _loop0_41_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -14257,14 +17473,17 @@ _loop0_41_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' with_item if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_41[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item")); Token * _literal; withitem_ty elem; while ( @@ -14277,6 +17496,7 @@ _loop0_41_rule(Parser *p) if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; PyMem_Free(_children); + D(p->level--); return NULL; } if (_n == _children_capacity) { @@ -14285,6 +17505,7 @@ _loop0_41_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -14293,17 +17514,21 @@ _loop0_41_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_41[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' with_item")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_41_type, _seq); + D(p->level--); return _seq; } @@ -14311,15 +17536,19 @@ _loop0_41_rule(Parser *p) static asdl_seq * _gather_40_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq * _res = NULL; int _mark = p->mark; { // with_item _loop0_41 if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _gather_40[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_41")); withitem_ty elem; asdl_seq * seq; if ( @@ -14328,13 +17557,17 @@ _gather_40_rule(Parser *p) (seq = _loop0_41_rule(p)) // _loop0_41 ) { + D(fprintf(stderr, "%*c+ _gather_40[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_41")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_40[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "with_item _loop0_41")); } _res = NULL; done: + D(p->level--); return _res; } @@ -14342,7 +17575,9 @@ _gather_40_rule(Parser *p) static asdl_seq * _loop0_43_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -14352,14 +17587,17 @@ _loop0_43_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' with_item if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_43[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item")); Token * _literal; withitem_ty elem; while ( @@ -14372,6 +17610,7 @@ _loop0_43_rule(Parser *p) if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; PyMem_Free(_children); + D(p->level--); return NULL; } if (_n == _children_capacity) { @@ -14380,6 +17619,7 @@ _loop0_43_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -14388,17 +17628,21 @@ _loop0_43_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_43[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' with_item")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_43_type, _seq); + D(p->level--); return _seq; } @@ -14406,15 +17650,19 @@ _loop0_43_rule(Parser *p) static asdl_seq * _gather_42_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq * _res = NULL; int _mark = p->mark; { // with_item _loop0_43 if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _gather_42[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_43")); withitem_ty elem; asdl_seq * seq; if ( @@ -14423,13 +17671,17 @@ _gather_42_rule(Parser *p) (seq = _loop0_43_rule(p)) // _loop0_43 ) { + D(fprintf(stderr, "%*c+ _gather_42[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_43")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_42[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "with_item _loop0_43")); } _res = NULL; done: + D(p->level--); return _res; } @@ -14437,7 +17689,9 @@ _gather_42_rule(Parser *p) static asdl_seq * _loop0_45_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -14447,14 +17701,17 @@ _loop0_45_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' with_item if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_45[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item")); Token * _literal; withitem_ty elem; while ( @@ -14467,6 +17724,7 @@ _loop0_45_rule(Parser *p) if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; PyMem_Free(_children); + D(p->level--); return NULL; } if (_n == _children_capacity) { @@ -14475,6 +17733,7 @@ _loop0_45_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -14483,17 +17742,21 @@ _loop0_45_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_45[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' with_item")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_45_type, _seq); + D(p->level--); return _seq; } @@ -14501,15 +17764,19 @@ _loop0_45_rule(Parser *p) static asdl_seq * _gather_44_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq * _res = NULL; int _mark = p->mark; { // with_item _loop0_45 if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _gather_44[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_45")); withitem_ty elem; asdl_seq * seq; if ( @@ -14518,13 +17785,17 @@ _gather_44_rule(Parser *p) (seq = _loop0_45_rule(p)) // _loop0_45 ) { + D(fprintf(stderr, "%*c+ _gather_44[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_45")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_44[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "with_item _loop0_45")); } _res = NULL; done: + D(p->level--); return _res; } @@ -14532,15 +17803,19 @@ _gather_44_rule(Parser *p) static void * _tmp_46_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // 'as' target if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_46[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' target")); Token * _keyword; expr_ty t; if ( @@ -14549,17 +17824,22 @@ _tmp_46_rule(Parser *p) (t = target_rule(p)) // target ) { + D(fprintf(stderr, "%*c+ _tmp_46[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' target")); _res = t; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_46[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' target")); } _res = NULL; done: + D(p->level--); return _res; } @@ -14567,7 +17847,9 @@ _tmp_46_rule(Parser *p) static asdl_seq * _loop1_47_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -14577,14 +17859,17 @@ _loop1_47_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // except_block if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop1_47[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_block")); excepthandler_ty except_block_var; while ( (except_block_var = except_block_rule(p)) // except_block @@ -14597,6 +17882,7 @@ _loop1_47_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -14605,9 +17891,12 @@ _loop1_47_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s 
_loop1_47[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "except_block")); } if (_n == 0 || p->error_indicator) { PyMem_Free(_children); + D(p->level--); return NULL; } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -14615,11 +17904,13 @@ _loop1_47_rule(Parser *p) PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop1_47_type, _seq); + D(p->level--); return _seq; } @@ -14627,15 +17918,19 @@ _loop1_47_rule(Parser *p) static void * _tmp_48_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // 'as' NAME if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_48[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); Token * _keyword; expr_ty z; if ( @@ -14644,17 +17939,22 @@ _tmp_48_rule(Parser *p) (z = _PyPegen_name_token(p)) // NAME ) { + D(fprintf(stderr, "%*c+ _tmp_48[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_48[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'as' NAME")); } _res = NULL; done: + D(p->level--); return _res; } @@ -14662,15 +17962,19 @@ _tmp_48_rule(Parser *p) static void * _tmp_49_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // 'from' expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_49[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from' expression")); Token * _keyword; expr_ty z; if ( @@ -14679,17 +17983,22 @@ _tmp_49_rule(Parser *p) (z = expression_rule(p)) // expression ) { + D(fprintf(stderr, "%*c+ _tmp_49[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from' expression")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_49[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'from' expression")); } _res = NULL; done: + D(p->level--); return _res; } @@ -14697,15 +18006,19 @@ _tmp_49_rule(Parser *p) static void * _tmp_50_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // '->' expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_50[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression")); Token * _literal; expr_ty z; if ( @@ -14714,17 +18027,22 @@ _tmp_50_rule(Parser *p) (z = expression_rule(p)) // expression ) { + D(fprintf(stderr, "%*c+ _tmp_50[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_50[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'->' expression")); } _res = NULL; done: + D(p->level--); return _res; } @@ -14732,15 +18050,19 @@ _tmp_50_rule(Parser *p) static void * _tmp_51_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // '->' expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_51[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression")); Token * _literal; expr_ty z; if ( @@ -14749,17 +18071,22 @@ _tmp_51_rule(Parser *p) (z = expression_rule(p)) // expression ) { + D(fprintf(stderr, "%*c+ _tmp_51[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_51[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'->' expression")); } _res = NULL; done: + D(p->level--); return _res; } @@ -14767,15 +18094,19 @@ _tmp_51_rule(Parser *p) static void * _tmp_52_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // NEWLINE INDENT if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_52[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT")); Token * indent_var; Token * newline_var; if ( @@ -14784,13 +18115,17 @@ _tmp_52_rule(Parser *p) (indent_var = _PyPegen_expect_token(p, INDENT)) // token='INDENT' ) { + D(fprintf(stderr, "%*c+ _tmp_52[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT")); _res = _PyPegen_dummy_name(p, newline_var, indent_var); goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_52[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NEWLINE INDENT")); } _res = NULL; done: + D(p->level--); return _res; } @@ -14798,7 +18133,9 @@ _tmp_52_rule(Parser *p) static asdl_seq * _loop0_53_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -14808,14 +18145,17 @@ _loop0_53_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_no_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_53[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -14828,6 +18168,7 @@ _loop0_53_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -14836,17 +18177,21 @@ _loop0_53_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_53[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_53_type, _seq); + D(p->level--); return _seq; } @@ -14854,7 +18199,9 @@ _loop0_53_rule(Parser *p) static asdl_seq * _loop0_54_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -14864,14 +18211,17 @@ _loop0_54_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_with_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_54[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -14884,6 +18234,7 @@ _loop0_54_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -14892,17 +18243,21 @@ _loop0_54_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_54[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_with_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_54_type, _seq); + D(p->level--); return _seq; } @@ -14910,7 +18265,9 @@ _loop0_54_rule(Parser *p) static asdl_seq * _loop0_55_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -14920,14 +18277,17 @@ _loop0_55_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_with_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_55[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -14940,6 +18300,7 @@ _loop0_55_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -14948,17 +18309,21 @@ _loop0_55_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_55[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_with_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_55_type, _seq); + D(p->level--); return _seq; } @@ -14966,7 +18331,9 @@ _loop0_55_rule(Parser *p) static asdl_seq * _loop1_56_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -14976,14 +18343,17 @@ _loop1_56_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_no_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop1_56[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -14996,6 +18366,7 @@ _loop1_56_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -15004,9 +18375,12 @@ _loop1_56_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_56[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); } if (_n == 0 || p->error_indicator) { PyMem_Free(_children); + D(p->level--); return NULL; } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -15014,11 +18388,13 @@ _loop1_56_rule(Parser *p) PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop1_56_type, _seq); + D(p->level--); return _seq; } @@ -15026,7 +18402,9 @@ _loop1_56_rule(Parser *p) static asdl_seq * _loop0_57_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -15036,14 +18414,17 @@ _loop0_57_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_with_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_57[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -15056,6 +18437,7 @@ _loop0_57_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -15064,17 +18446,21 @@ _loop0_57_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_57[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_with_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_57_type, _seq); + D(p->level--); return _seq; } @@ -15082,7 +18468,9 @@ _loop0_57_rule(Parser *p) static asdl_seq * _loop1_58_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -15092,14 +18480,17 @@ _loop1_58_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_with_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop1_58[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -15112,6 +18503,7 @@ _loop1_58_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -15120,9 +18512,12 @@ _loop1_58_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_58[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_with_default")); } if (_n == 0 || p->error_indicator) { PyMem_Free(_children); + D(p->level--); return NULL; } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -15130,11 +18525,13 @@ _loop1_58_rule(Parser *p) PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop1_58_type, _seq); + D(p->level--); return _seq; } @@ -15142,7 +18539,9 @@ _loop1_58_rule(Parser *p) static asdl_seq * _loop1_59_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -15152,14 +18551,17 @@ _loop1_59_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_no_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop1_59[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -15172,6 +18574,7 @@ _loop1_59_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -15180,9 +18583,12 @@ _loop1_59_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_59[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); } if (_n == 0 || p->error_indicator) { PyMem_Free(_children); + D(p->level--); return NULL; } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -15190,11 +18596,13 @@ _loop1_59_rule(Parser *p) PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop1_59_type, _seq); + D(p->level--); return _seq; } @@ -15202,7 +18610,9 @@ _loop1_59_rule(Parser *p) static asdl_seq * _loop1_60_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -15212,14 +18622,17 @@ _loop1_60_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_no_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop1_60[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -15232,6 +18645,7 @@ _loop1_60_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -15240,9 +18654,12 @@ _loop1_60_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_60[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); } if (_n == 0 || p->error_indicator) { PyMem_Free(_children); + D(p->level--); return NULL; } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -15250,11 +18667,13 @@ _loop1_60_rule(Parser *p) PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop1_60_type, _seq); + D(p->level--); return _seq; } @@ -15262,7 +18681,9 @@ _loop1_60_rule(Parser *p) static asdl_seq * _loop0_61_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -15272,14 +18693,17 @@ _loop0_61_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_no_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_61[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -15292,6 +18716,7 @@ _loop0_61_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -15300,17 +18725,21 @@ _loop0_61_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_61[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_61_type, _seq); + D(p->level--); return _seq; } @@ -15318,7 +18747,9 @@ _loop0_61_rule(Parser *p) static asdl_seq * _loop1_62_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -15328,14 +18759,17 @@ _loop1_62_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_with_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop1_62[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -15348,6 +18782,7 @@ _loop1_62_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -15356,9 +18791,12 @@ _loop1_62_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_62[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_with_default")); } if (_n == 0 || p->error_indicator) { PyMem_Free(_children); + D(p->level--); return NULL; } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -15366,11 +18804,13 @@ _loop1_62_rule(Parser *p) PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop1_62_type, _seq); + D(p->level--); return _seq; } @@ -15378,7 +18818,9 @@ _loop1_62_rule(Parser *p) static asdl_seq * _loop0_63_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -15388,14 +18830,17 @@ _loop0_63_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_no_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_63[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -15408,6 +18853,7 @@ _loop0_63_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -15416,17 +18862,21 @@ _loop0_63_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_63[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_63_type, _seq); + D(p->level--); return _seq; } @@ -15434,7 +18884,9 @@ _loop0_63_rule(Parser *p) static asdl_seq * _loop1_64_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -15444,14 +18896,17 @@ _loop1_64_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_with_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop1_64[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -15464,6 +18919,7 @@ _loop1_64_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -15472,9 +18928,12 @@ _loop1_64_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_64[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_with_default")); } if (_n == 0 || p->error_indicator) { PyMem_Free(_children); + D(p->level--); return NULL; } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -15482,11 +18941,13 @@ _loop1_64_rule(Parser *p) PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop1_64_type, _seq); + D(p->level--); return _seq; } @@ -15494,7 +18955,9 @@ _loop1_64_rule(Parser *p) static asdl_seq * _loop0_65_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -15504,14 +18967,17 @@ _loop0_65_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_maybe_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_65[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default @@ -15524,6 +18990,7 @@ _loop0_65_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -15532,17 +18999,21 @@ _loop0_65_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_65[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_maybe_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_65_type, _seq); + D(p->level--); return _seq; } @@ -15550,7 +19021,9 @@ _loop0_65_rule(Parser *p) static asdl_seq * _loop1_66_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -15560,14 +19033,17 @@ _loop1_66_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_maybe_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop1_66[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default @@ -15580,6 +19056,7 @@ _loop1_66_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -15588,9 +19065,12 @@ _loop1_66_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_66[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_maybe_default")); } if (_n == 0 || p->error_indicator) { PyMem_Free(_children); + D(p->level--); return NULL; } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -15598,11 +19078,13 @@ _loop1_66_rule(Parser *p) PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop1_66_type, _seq); + D(p->level--); return _seq; } @@ -15610,7 +19092,9 @@ _loop1_66_rule(Parser *p) static asdl_seq * _loop1_67_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -15620,14 +19104,17 @@ _loop1_67_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // ('@' named_expression NEWLINE) if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop1_67[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('@' named_expression NEWLINE)")); void *_tmp_138_var; while ( (_tmp_138_var = _tmp_138_rule(p)) // '@' named_expression NEWLINE @@ -15640,6 +19127,7 @@ _loop1_67_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -15648,9 +19136,12 @@ _loop1_67_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_67[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "('@' named_expression NEWLINE)")); } if (_n == 0 || p->error_indicator) { PyMem_Free(_children); + D(p->level--); return NULL; } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -15658,11 +19149,13 @@ _loop1_67_rule(Parser *p) PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop1_67_type, _seq); + D(p->level--); return _seq; } @@ -15670,15 +19163,19 @@ _loop1_67_rule(Parser *p) static void * _tmp_68_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // '(' arguments? ')' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_68[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); Token * _literal; Token * _literal_1; void *z; @@ -15690,17 +19187,22 @@ _tmp_68_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { + D(fprintf(stderr, "%*c+ _tmp_68[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_68[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' arguments? 
')'")); } _res = NULL; done: + D(p->level--); return _res; } @@ -15708,7 +19210,9 @@ _tmp_68_rule(Parser *p) static asdl_seq * _loop0_70_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -15718,14 +19222,17 @@ _loop0_70_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' star_expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_70[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression")); Token * _literal; expr_ty elem; while ( @@ -15738,6 +19245,7 @@ _loop0_70_rule(Parser *p) if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; PyMem_Free(_children); + D(p->level--); return NULL; } if (_n == _children_capacity) { @@ -15746,6 +19254,7 @@ _loop0_70_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -15754,17 +19263,21 @@ _loop0_70_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_70[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' star_expression")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_70_type, _seq); + D(p->level--); return _seq; } @@ -15772,15 +19285,19 @@ _loop0_70_rule(Parser *p) static asdl_seq * _gather_69_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq * _res = NULL; int _mark = p->mark; { // star_expression _loop0_70 if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _gather_69[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expression _loop0_70")); expr_ty elem; asdl_seq * seq; if ( @@ -15789,13 +19306,17 @@ _gather_69_rule(Parser *p) (seq = _loop0_70_rule(p)) // _loop0_70 ) { + D(fprintf(stderr, "%*c+ _gather_69[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expression _loop0_70")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_69[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_expression _loop0_70")); } _res = NULL; done: + D(p->level--); return _res; } @@ -15803,7 +19324,9 @@ _gather_69_rule(Parser *p) static asdl_seq * _loop1_71_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -15813,14 +19336,17 @@ _loop1_71_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // (',' star_expression) if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop1_71[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_expression)")); void *_tmp_139_var; while ( (_tmp_139_var = _tmp_139_rule(p)) // ',' star_expression @@ -15833,6 +19359,7 @@ _loop1_71_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -15841,9 +19368,12 @@ _loop1_71_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_71[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(',' star_expression)")); } if (_n == 0 || p->error_indicator) { PyMem_Free(_children); + D(p->level--); return NULL; } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -15851,11 +19381,13 @@ _loop1_71_rule(Parser *p) PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop1_71_type, _seq); + D(p->level--); return _seq; } @@ -15863,7 +19395,9 @@ _loop1_71_rule(Parser *p) static asdl_seq * _loop0_73_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -15873,14 +19407,17 @@ _loop0_73_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' star_named_expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_73[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_named_expression")); Token * _literal; expr_ty elem; while ( @@ -15893,6 +19430,7 @@ _loop0_73_rule(Parser *p) if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; PyMem_Free(_children); + D(p->level--); return NULL; } if (_n == _children_capacity) { @@ -15901,6 +19439,7 @@ _loop0_73_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -15909,17 +19448,21 @@ _loop0_73_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_73[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' star_named_expression")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_73_type, _seq); + D(p->level--); return _seq; } @@ -15927,15 +19470,19 @@ _loop0_73_rule(Parser *p) static asdl_seq * _gather_72_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq * _res = NULL; int _mark = p->mark; { // star_named_expression _loop0_73 if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _gather_72[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression _loop0_73")); expr_ty elem; asdl_seq * seq; if ( @@ -15944,13 +19491,17 @@ _gather_72_rule(Parser *p) (seq = _loop0_73_rule(p)) // _loop0_73 ) { + D(fprintf(stderr, "%*c+ _gather_72[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_named_expression _loop0_73")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_72[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_named_expression _loop0_73")); } _res = NULL; done: + D(p->level--); return _res; } @@ -15958,7 +19509,9 @@ _gather_72_rule(Parser *p) static asdl_seq * _loop1_74_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -15968,14 +19521,17 @@ _loop1_74_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // (',' expression) if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop1_74[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' expression)")); void *_tmp_140_var; while ( (_tmp_140_var = _tmp_140_rule(p)) // ',' expression @@ -15988,6 +19544,7 @@ _loop1_74_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -15996,9 +19553,12 @@ _loop1_74_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_74[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(',' expression)")); } if (_n == 0 || p->error_indicator) { PyMem_Free(_children); + D(p->level--); return NULL; } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -16006,11 +19566,13 @@ _loop1_74_rule(Parser *p) PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop1_74_type, _seq); + D(p->level--); return _seq; } @@ -16018,7 +19580,9 @@ _loop1_74_rule(Parser *p) static asdl_seq * _loop0_75_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -16028,14 +19592,17 @@ _loop0_75_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // lambda_param_no_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_75[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default @@ -16048,6 +19615,7 @@ _loop0_75_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -16056,17 +19624,21 @@ _loop0_75_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_75[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_75_type, _seq); + D(p->level--); return _seq; } @@ -16074,7 +19646,9 @@ _loop0_75_rule(Parser *p) static asdl_seq * _loop0_76_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -16084,14 +19658,17 @@ _loop0_76_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // lambda_param_with_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_76[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default @@ -16104,6 +19681,7 @@ _loop0_76_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -16112,17 +19690,21 @@ _loop0_76_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_76[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_with_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_76_type, _seq); + D(p->level--); return _seq; } @@ -16130,7 +19712,9 @@ _loop0_76_rule(Parser *p) static asdl_seq * _loop0_77_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -16140,14 +19724,17 @@ _loop0_77_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // lambda_param_with_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_77[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default @@ -16160,6 +19747,7 @@ _loop0_77_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -16168,17 +19756,21 @@ _loop0_77_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_77[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_with_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_77_type, _seq); + D(p->level--); return _seq; } @@ -16186,7 +19778,9 @@ _loop0_77_rule(Parser *p) static asdl_seq * _loop1_78_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -16196,14 +19790,17 @@ _loop1_78_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // lambda_param_no_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop1_78[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default @@ -16216,6 +19813,7 @@ _loop1_78_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -16224,9 +19822,12 @@ _loop1_78_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_78[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default")); } if (_n == 0 || p->error_indicator) { PyMem_Free(_children); + D(p->level--); return NULL; } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -16234,11 +19835,13 @@ _loop1_78_rule(Parser *p) PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop1_78_type, _seq); + D(p->level--); return _seq; } @@ -16246,7 +19849,9 @@ _loop1_78_rule(Parser *p) static asdl_seq * _loop0_79_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -16256,14 +19861,17 @@ _loop0_79_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // lambda_param_with_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_79[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default @@ -16276,6 +19884,7 @@ _loop0_79_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -16284,17 +19893,21 @@ _loop0_79_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_79[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_with_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_79_type, _seq); + D(p->level--); return _seq; } @@ -16302,7 +19915,9 @@ _loop0_79_rule(Parser *p) static asdl_seq * _loop1_80_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -16312,14 +19927,17 @@ _loop1_80_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // lambda_param_with_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop1_80[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default @@ -16332,6 +19950,7 @@ _loop1_80_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -16340,9 +19959,12 @@ _loop1_80_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_80[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_with_default")); } if (_n == 0 || p->error_indicator) { PyMem_Free(_children); + D(p->level--); return NULL; } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -16350,11 +19972,13 @@ _loop1_80_rule(Parser *p) PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop1_80_type, _seq); + D(p->level--); return _seq; } @@ -16362,7 +19986,9 @@ _loop1_80_rule(Parser *p) static asdl_seq * _loop1_81_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -16372,14 +19998,17 @@ _loop1_81_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // lambda_param_no_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop1_81[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default @@ -16392,6 +20021,7 @@ _loop1_81_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -16400,9 +20030,12 @@ _loop1_81_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_81[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default")); } if (_n == 0 || p->error_indicator) { PyMem_Free(_children); + D(p->level--); return NULL; } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -16410,11 +20043,13 @@ _loop1_81_rule(Parser *p) PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop1_81_type, _seq); + D(p->level--); return _seq; } @@ -16422,7 +20057,9 @@ _loop1_81_rule(Parser *p) static asdl_seq * _loop1_82_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -16432,14 +20069,17 @@ _loop1_82_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // lambda_param_no_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop1_82[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default @@ -16452,6 +20092,7 @@ _loop1_82_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -16460,9 +20101,12 @@ _loop1_82_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_82[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default")); } if (_n == 0 || p->error_indicator) { PyMem_Free(_children); + D(p->level--); return NULL; } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -16470,11 +20114,13 @@ _loop1_82_rule(Parser *p) PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop1_82_type, _seq); + D(p->level--); return _seq; } @@ -16482,7 +20128,9 @@ _loop1_82_rule(Parser *p) static asdl_seq * _loop0_83_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -16492,14 +20140,17 @@ _loop0_83_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // lambda_param_no_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_83[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default @@ -16512,6 +20163,7 @@ _loop0_83_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -16520,17 +20172,21 @@ _loop0_83_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_83[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_83_type, _seq); + D(p->level--); return _seq; } @@ -16538,7 +20194,9 @@ _loop0_83_rule(Parser *p) static asdl_seq * _loop1_84_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -16548,14 +20206,17 @@ _loop1_84_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // lambda_param_with_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop1_84[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default @@ -16568,6 +20229,7 @@ _loop1_84_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -16576,9 +20238,12 @@ _loop1_84_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_84[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_with_default")); } if (_n == 0 || p->error_indicator) { PyMem_Free(_children); + D(p->level--); return NULL; } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -16586,11 +20251,13 @@ _loop1_84_rule(Parser *p) PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop1_84_type, _seq); + D(p->level--); return _seq; } @@ -16598,7 +20265,9 @@ _loop1_84_rule(Parser *p) static asdl_seq * _loop0_85_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -16608,14 +20277,17 @@ _loop0_85_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // lambda_param_no_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_85[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default @@ -16628,6 +20300,7 @@ _loop0_85_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -16636,17 +20309,21 @@ _loop0_85_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_85[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_85_type, _seq); + D(p->level--); return _seq; } @@ -16654,7 +20331,9 @@ _loop0_85_rule(Parser *p) static asdl_seq * _loop1_86_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -16664,14 +20343,17 @@ _loop1_86_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // lambda_param_with_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop1_86[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default @@ -16684,6 +20366,7 @@ _loop1_86_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -16692,9 +20375,12 @@ _loop1_86_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_86[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_with_default")); } if (_n == 0 || p->error_indicator) { PyMem_Free(_children); + D(p->level--); return NULL; } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -16702,11 +20388,13 @@ _loop1_86_rule(Parser *p) PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop1_86_type, _seq); + D(p->level--); return _seq; } @@ -16714,7 +20402,9 @@ _loop1_86_rule(Parser *p) static asdl_seq * _loop0_87_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -16724,14 +20414,17 @@ _loop0_87_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // lambda_param_maybe_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_87[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); NameDefaultPair* lambda_param_maybe_default_var; while ( (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default @@ -16744,6 +20437,7 @@ _loop0_87_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -16752,17 +20446,21 @@ _loop0_87_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_87[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_maybe_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_87_type, _seq); + D(p->level--); return _seq; } @@ -16770,7 +20468,9 @@ _loop0_87_rule(Parser *p) static asdl_seq * _loop1_88_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -16780,14 +20480,17 @@ _loop1_88_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // lambda_param_maybe_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop1_88[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); NameDefaultPair* lambda_param_maybe_default_var; while ( (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default @@ -16800,6 +20503,7 @@ _loop1_88_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -16808,9 +20512,12 @@ _loop1_88_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_88[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_maybe_default")); } if (_n == 0 || p->error_indicator) { PyMem_Free(_children); + D(p->level--); return NULL; } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -16818,11 +20525,13 @@ _loop1_88_rule(Parser *p) PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop1_88_type, _seq); + D(p->level--); return _seq; } @@ -16830,7 +20539,9 @@ _loop1_88_rule(Parser *p) static asdl_seq * _loop1_89_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -16840,14 +20551,17 @@ _loop1_89_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // ('or' conjunction) if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop1_89[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('or' conjunction)")); void *_tmp_141_var; while ( (_tmp_141_var = _tmp_141_rule(p)) // 'or' conjunction @@ -16860,6 +20574,7 @@ _loop1_89_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -16868,9 +20583,12 @@ _loop1_89_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_89[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "('or' conjunction)")); } if (_n == 0 || p->error_indicator) { PyMem_Free(_children); + D(p->level--); return NULL; } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -16878,11 +20596,13 @@ _loop1_89_rule(Parser *p) PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop1_89_type, _seq); + D(p->level--); return _seq; } @@ -16890,7 +20610,9 @@ _loop1_89_rule(Parser *p) static asdl_seq * _loop1_90_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -16900,14 +20622,17 @@ _loop1_90_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // ('and' inversion) if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop1_90[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('and' inversion)")); void *_tmp_142_var; while ( (_tmp_142_var = _tmp_142_rule(p)) // 'and' inversion @@ -16920,6 +20645,7 @@ _loop1_90_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -16928,9 +20654,12 @@ _loop1_90_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_90[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "('and' inversion)")); } if (_n == 0 || p->error_indicator) { PyMem_Free(_children); + D(p->level--); return NULL; } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -16938,11 +20667,13 @@ _loop1_90_rule(Parser *p) PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop1_90_type, _seq); + D(p->level--); return _seq; } @@ -16950,7 +20681,9 @@ _loop1_90_rule(Parser *p) static asdl_seq * _loop1_91_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -16960,14 +20693,17 @@ _loop1_91_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // compare_op_bitwise_or_pair if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop1_91[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "compare_op_bitwise_or_pair")); CmpopExprPair* compare_op_bitwise_or_pair_var; while ( (compare_op_bitwise_or_pair_var = compare_op_bitwise_or_pair_rule(p)) // compare_op_bitwise_or_pair @@ -16980,6 +20716,7 @@ _loop1_91_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -16988,9 +20725,12 @@ _loop1_91_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_91[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "compare_op_bitwise_or_pair")); } if (_n == 0 || p->error_indicator) { PyMem_Free(_children); + D(p->level--); return NULL; } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -16998,11 +20738,13 @@ _loop1_91_rule(Parser *p) PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop1_91_type, _seq); + D(p->level--); return _seq; } @@ -17010,31 +20752,40 @@ _loop1_91_rule(Parser *p) static void * _tmp_92_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // '!=' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_92[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!='")); Token * tok; if ( (tok = _PyPegen_expect_token(p, 28)) // token='!=' ) { + D(fprintf(stderr, "%*c+ _tmp_92[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!='")); _res = _PyPegen_check_barry_as_flufl ( p ) ? NULL : tok; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_92[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'!='")); } _res = NULL; done: + D(p->level--); return _res; } @@ -17042,7 +20793,9 @@ _tmp_92_rule(Parser *p) static asdl_seq * _loop0_94_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -17052,14 +20805,17 @@ _loop0_94_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' slice if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_94[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' slice")); Token * _literal; expr_ty elem; while ( @@ -17072,6 +20828,7 @@ _loop0_94_rule(Parser *p) if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; PyMem_Free(_children); + D(p->level--); return NULL; } if (_n == _children_capacity) { @@ -17080,6 +20837,7 @@ _loop0_94_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -17088,17 +20846,21 @@ _loop0_94_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_94[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' slice")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_94_type, _seq); + D(p->level--); return _seq; } @@ -17106,15 +20868,19 @@ _loop0_94_rule(Parser *p) static asdl_seq * _gather_93_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq * _res = NULL; int _mark = p->mark; { // slice _loop0_94 if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _gather_93[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slice _loop0_94")); expr_ty elem; asdl_seq * seq; if ( @@ -17123,13 +20889,17 @@ _gather_93_rule(Parser *p) (seq = _loop0_94_rule(p)) // _loop0_94 ) { + D(fprintf(stderr, "%*c+ _gather_93[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slice _loop0_94")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_93[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slice _loop0_94")); } _res = NULL; done: + D(p->level--); return _res; } @@ -17137,15 +20907,19 @@ _gather_93_rule(Parser *p) static void * _tmp_95_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // ':' expression? if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_95[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':' expression?")); Token * _literal; void *d; if ( @@ -17154,17 +20928,22 @@ _tmp_95_rule(Parser *p) (d = expression_rule(p), 1) // expression? 
) { + D(fprintf(stderr, "%*c+ _tmp_95[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':' expression?")); _res = d; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_95[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':' expression?")); } _res = NULL; done: + D(p->level--); return _res; } @@ -17172,55 +20951,73 @@ _tmp_95_rule(Parser *p) static void * _tmp_96_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // tuple if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_96[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple")); expr_ty tuple_var; if ( (tuple_var = tuple_rule(p)) // tuple ) { + D(fprintf(stderr, "%*c+ _tmp_96[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple")); _res = tuple_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_96[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "tuple")); } { // group if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_96[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "group")); expr_ty group_var; if ( (group_var = group_rule(p)) // group ) { + D(fprintf(stderr, "%*c+ _tmp_96[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "group")); _res = group_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_96[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "group")); } { // genexp if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_96[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "genexp")); expr_ty genexp_var; if ( (genexp_var = genexp_rule(p)) // genexp ) { + D(fprintf(stderr, "%*c+ _tmp_96[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "genexp")); _res = genexp_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_96[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "genexp")); } _res = NULL; done: + D(p->level--); return _res; } @@ -17228,41 +21025,54 @@ _tmp_96_rule(Parser *p) static void * _tmp_97_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // list if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_97[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list")); expr_ty list_var; if ( (list_var = list_rule(p)) // list ) { + D(fprintf(stderr, "%*c+ _tmp_97[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list")); _res = list_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_97[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "list")); } { // listcomp if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_97[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "listcomp")); expr_ty listcomp_var; if ( (listcomp_var = listcomp_rule(p)) // listcomp ) { + D(fprintf(stderr, "%*c+ _tmp_97[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "listcomp")); _res = listcomp_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_97[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "listcomp")); } _res = NULL; done: + D(p->level--); return _res; } @@ -17270,69 +21080,92 @@ _tmp_97_rule(Parser *p) static void * _tmp_98_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // dict if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_98[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dict")); expr_ty dict_var; if ( (dict_var = dict_rule(p)) // dict ) { + D(fprintf(stderr, "%*c+ _tmp_98[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dict")); _res = dict_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_98[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dict")); } { // set if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_98[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "set")); expr_ty set_var; if ( (set_var = set_rule(p)) // set ) { + D(fprintf(stderr, "%*c+ _tmp_98[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "set")); _res = set_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_98[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "set")); } { // dictcomp if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_98[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dictcomp")); expr_ty dictcomp_var; if ( (dictcomp_var = dictcomp_rule(p)) // dictcomp ) { + D(fprintf(stderr, "%*c+ _tmp_98[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dictcomp")); _res = dictcomp_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_98[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "dictcomp")); } { // setcomp if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_98[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "setcomp")); expr_ty setcomp_var; if ( (setcomp_var = setcomp_rule(p)) // setcomp ) { + D(fprintf(stderr, "%*c+ _tmp_98[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "setcomp")); _res = setcomp_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_98[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "setcomp")); } _res = NULL; done: + D(p->level--); return _res; } @@ -17340,7 +21173,9 @@ _tmp_98_rule(Parser *p) static asdl_seq * _loop1_99_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -17350,14 +21185,17 @@ _loop1_99_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // STRING if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop1_99[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "STRING")); expr_ty string_var; while ( (string_var = _PyPegen_string_token(p)) // STRING @@ -17370,6 +21208,7 @@ _loop1_99_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -17378,9 +21217,12 @@ _loop1_99_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_99[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "STRING")); } if (_n == 0 || p->error_indicator) { PyMem_Free(_children); + D(p->level--); return NULL; } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -17388,11 +21230,13 @@ _loop1_99_rule(Parser *p) PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop1_99_type, _seq); + D(p->level--); return _seq; } @@ -17400,15 +21244,19 @@ _loop1_99_rule(Parser *p) static void * _tmp_100_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // star_named_expression ',' star_named_expressions? if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_100[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions?")); Token * _literal; expr_ty y; void *z; @@ -17420,17 +21268,22 @@ _tmp_100_rule(Parser *p) (z = star_named_expressions_rule(p), 1) // star_named_expressions? ) { + D(fprintf(stderr, "%*c+ _tmp_100[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions?")); _res = _PyPegen_seq_insert_in_front ( p , y , z ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_100[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_named_expression ',' star_named_expressions?")); } _res = NULL; done: + D(p->level--); return _res; } @@ -17438,41 +21291,54 @@ _tmp_100_rule(Parser *p) static void * _tmp_101_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // yield_expr if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_101[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { + D(fprintf(stderr, "%*c+ _tmp_101[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); _res = yield_expr_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_101[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr")); } { // named_expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_101[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "named_expression")); expr_ty named_expression_var; if ( (named_expression_var = named_expression_rule(p)) // named_expression ) { + D(fprintf(stderr, "%*c+ _tmp_101[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "named_expression")); _res = named_expression_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_101[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "named_expression")); } _res = NULL; done: + D(p->level--); return _res; } @@ -17480,7 +21346,9 @@ _tmp_101_rule(Parser *p) static asdl_seq * _loop0_103_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -17490,14 +21358,17 @@ _loop0_103_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' double_starred_kvpair if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_103[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' double_starred_kvpair")); Token * _literal; KeyValuePair* elem; while ( @@ -17510,6 +21381,7 @@ _loop0_103_rule(Parser *p) if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; PyMem_Free(_children); + D(p->level--); return NULL; } if (_n == _children_capacity) { @@ -17518,6 +21390,7 @@ _loop0_103_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -17526,17 +21399,21 @@ _loop0_103_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_103[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' double_starred_kvpair")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_103_type, _seq); + D(p->level--); return _seq; } @@ -17544,15 +21421,19 @@ _loop0_103_rule(Parser *p) static asdl_seq * _gather_102_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq * _res = NULL; int _mark = p->mark; { // double_starred_kvpair _loop0_103 if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _gather_102[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_103")); KeyValuePair* elem; asdl_seq * seq; if ( @@ -17561,13 +21442,17 @@ _gather_102_rule(Parser *p) (seq = _loop0_103_rule(p)) // _loop0_103 ) { + D(fprintf(stderr, "%*c+ _gather_102[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_103")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_102[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "double_starred_kvpair _loop0_103")); } _res = NULL; done: + D(p->level--); return _res; } @@ -17575,7 +21460,9 @@ _gather_102_rule(Parser *p) static asdl_seq * _loop1_104_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -17585,14 +21472,17 @@ _loop1_104_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // for_if_clause if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop1_104[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "for_if_clause")); comprehension_ty for_if_clause_var; while ( (for_if_clause_var = for_if_clause_rule(p)) // for_if_clause @@ -17605,6 +21495,7 @@ _loop1_104_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -17613,9 +21504,12 @@ _loop1_104_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_104[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "for_if_clause")); } if (_n == 0 || p->error_indicator) { PyMem_Free(_children); + D(p->level--); return NULL; } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -17623,11 +21517,13 @@ _loop1_104_rule(Parser *p) PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop1_104_type, _seq); + D(p->level--); return _seq; } @@ -17635,7 +21531,9 @@ _loop1_104_rule(Parser *p) static asdl_seq * _loop0_105_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -17645,14 +21543,17 @@ _loop0_105_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // ('if' disjunction) if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_105[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)")); void *_tmp_143_var; while ( (_tmp_143_var = _tmp_143_rule(p)) // 'if' disjunction @@ -17665,6 +21566,7 @@ _loop0_105_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -17673,17 +21575,21 @@ _loop0_105_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_105[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "('if' disjunction)")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_105_type, _seq); + D(p->level--); return _seq; } @@ -17691,7 +21597,9 @@ _loop0_105_rule(Parser *p) static asdl_seq * _loop0_106_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -17701,14 +21609,17 @@ _loop0_106_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // ('if' disjunction) if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_106[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)")); void *_tmp_144_var; while ( (_tmp_144_var = _tmp_144_rule(p)) // 'if' disjunction @@ -17721,6 +21632,7 @@ _loop0_106_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -17729,17 +21641,21 @@ _loop0_106_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_106[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "('if' disjunction)")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_106_type, _seq); + D(p->level--); return _seq; } @@ -17747,15 +21663,19 @@ _loop0_106_rule(Parser *p) static void * _tmp_107_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // ',' args if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_107[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' args")); Token * _literal; expr_ty c; if ( @@ -17764,17 +21684,22 @@ _tmp_107_rule(Parser *p) (c = args_rule(p)) // args ) { + D(fprintf(stderr, "%*c+ _tmp_107[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' args")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_107[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' args")); } _res = NULL; done: + D(p->level--); return _res; } @@ -17782,15 +21707,19 @@ _tmp_107_rule(Parser *p) static void * _tmp_108_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // ',' args if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_108[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' args")); Token * _literal; expr_ty c; if ( @@ -17799,17 +21728,22 @@ _tmp_108_rule(Parser *p) (c = args_rule(p)) // args ) { + D(fprintf(stderr, "%*c+ _tmp_108[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' args")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_108[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' args")); } _res = NULL; done: + D(p->level--); return _res; } @@ -17817,7 +21751,9 @@ _tmp_108_rule(Parser *p) static asdl_seq * _loop0_110_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -17827,14 +21763,17 @@ _loop0_110_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' kwarg_or_starred if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_110[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_starred")); Token * _literal; KeywordOrStarred* elem; while ( @@ -17847,6 +21786,7 @@ _loop0_110_rule(Parser *p) if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; PyMem_Free(_children); + D(p->level--); return NULL; } if (_n == _children_capacity) { @@ -17855,6 +21795,7 @@ _loop0_110_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } 
_children = _new_children; @@ -17863,17 +21804,21 @@ _loop0_110_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_110[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwarg_or_starred")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_110_type, _seq); + D(p->level--); return _seq; } @@ -17881,15 +21826,19 @@ _loop0_110_rule(Parser *p) static asdl_seq * _gather_109_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq * _res = NULL; int _mark = p->mark; { // kwarg_or_starred _loop0_110 if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _gather_109[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_110")); KeywordOrStarred* elem; asdl_seq * seq; if ( @@ -17898,13 +21847,17 @@ _gather_109_rule(Parser *p) (seq = _loop0_110_rule(p)) // _loop0_110 ) { + D(fprintf(stderr, "%*c+ _gather_109[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_110")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_109[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "kwarg_or_starred _loop0_110")); } _res = NULL; done: + D(p->level--); return _res; } @@ -17912,7 +21865,9 @@ _gather_109_rule(Parser *p) static asdl_seq * _loop0_112_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -17922,14 +21877,17 @@ _loop0_112_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' kwarg_or_double_starred if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_112[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_double_starred")); Token * _literal; KeywordOrStarred* elem; while ( @@ -17942,6 +21900,7 @@ _loop0_112_rule(Parser *p) if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; PyMem_Free(_children); + D(p->level--); return NULL; } if (_n == _children_capacity) { @@ -17950,6 +21909,7 @@ _loop0_112_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -17958,17 +21918,21 @@ _loop0_112_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_112[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' kwarg_or_double_starred")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_112_type, _seq); + D(p->level--); return _seq; } @@ -17976,15 +21940,19 @@ _loop0_112_rule(Parser *p) static asdl_seq * _gather_111_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq * _res = NULL; int _mark = p->mark; { // kwarg_or_double_starred _loop0_112 if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _gather_111[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_112")); KeywordOrStarred* elem; asdl_seq * seq; if ( @@ -17993,13 +21961,17 @@ _gather_111_rule(Parser *p) (seq = _loop0_112_rule(p)) // _loop0_112 ) { + D(fprintf(stderr, "%*c+ _gather_111[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_112")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_111[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "kwarg_or_double_starred _loop0_112")); } _res = NULL; done: + D(p->level--); return _res; } @@ -18007,7 +21979,9 @@ _gather_111_rule(Parser *p) static asdl_seq * _loop0_114_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -18017,14 +21991,17 @@ _loop0_114_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' kwarg_or_starred if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_114[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_starred")); Token * _literal; KeywordOrStarred* elem; while ( @@ -18037,6 +22014,7 @@ _loop0_114_rule(Parser *p) if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; PyMem_Free(_children); + D(p->level--); return NULL; } if (_n == _children_capacity) { @@ -18045,6 +22023,7 @@ _loop0_114_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -18053,17 +22032,21 @@ _loop0_114_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_114[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' kwarg_or_starred")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_114_type, _seq); + D(p->level--); return _seq; } @@ -18071,15 +22054,19 @@ _loop0_114_rule(Parser *p) static asdl_seq * _gather_113_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq * _res = NULL; int _mark = p->mark; { // kwarg_or_starred _loop0_114 if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _gather_113[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_114")); KeywordOrStarred* elem; asdl_seq * seq; if ( @@ -18088,13 +22075,17 @@ _gather_113_rule(Parser *p) (seq = _loop0_114_rule(p)) // _loop0_114 ) { + D(fprintf(stderr, "%*c+ _gather_113[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_114")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_113[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "kwarg_or_starred _loop0_114")); } _res = NULL; done: + D(p->level--); return _res; } @@ -18102,7 +22093,9 @@ _gather_113_rule(Parser *p) static asdl_seq * _loop0_116_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -18112,14 +22105,17 @@ _loop0_116_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' kwarg_or_double_starred if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_116[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_double_starred")); Token * _literal; KeywordOrStarred* elem; while ( @@ -18132,6 +22128,7 @@ _loop0_116_rule(Parser *p) if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; PyMem_Free(_children); + D(p->level--); return NULL; } if (_n == _children_capacity) { @@ -18140,6 +22137,7 @@ _loop0_116_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -18148,17 +22146,21 @@ _loop0_116_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_116[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' kwarg_or_double_starred")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_116_type, _seq); + D(p->level--); return _seq; } @@ -18166,15 +22168,19 @@ _loop0_116_rule(Parser *p) static asdl_seq * _gather_115_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq * _res = NULL; int _mark = p->mark; { // kwarg_or_double_starred _loop0_116 if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _gather_115[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_116")); KeywordOrStarred* elem; asdl_seq * seq; if ( @@ -18183,13 +22189,17 @@ _gather_115_rule(Parser *p) (seq = _loop0_116_rule(p)) // _loop0_116 ) { + D(fprintf(stderr, "%*c+ _gather_115[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_116")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_115[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "kwarg_or_double_starred _loop0_116")); } _res = NULL; done: + D(p->level--); return _res; } @@ -18197,7 +22207,9 @@ _gather_115_rule(Parser *p) static asdl_seq * _loop0_117_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -18207,14 +22219,17 @@ _loop0_117_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // (',' star_target) if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_117[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)")); void *_tmp_145_var; while ( (_tmp_145_var = _tmp_145_rule(p)) // ',' star_target @@ -18227,6 +22242,7 @@ _loop0_117_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -18235,17 +22251,21 @@ _loop0_117_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_117[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(',' star_target)")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_117_type, _seq); + D(p->level--); return _seq; } @@ -18253,7 +22273,9 @@ _loop0_117_rule(Parser *p) static asdl_seq * _loop0_119_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -18263,14 +22285,17 @@ _loop0_119_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' star_target if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_119[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); Token * _literal; expr_ty elem; while ( @@ -18283,6 +22308,7 @@ _loop0_119_rule(Parser *p) if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; PyMem_Free(_children); + D(p->level--); return NULL; } if (_n == _children_capacity) { @@ -18291,6 +22317,7 @@ _loop0_119_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -18299,17 +22326,21 @@ _loop0_119_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_119[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' star_target")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_119_type, _seq); + D(p->level--); return _seq; } @@ -18317,15 +22348,19 @@ _loop0_119_rule(Parser *p) static asdl_seq * _gather_118_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq * _res = NULL; int _mark = p->mark; { // star_target _loop0_119 if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _gather_118[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_target _loop0_119")); expr_ty elem; asdl_seq * seq; if ( @@ -18334,13 +22369,17 @@ _gather_118_rule(Parser *p) (seq = _loop0_119_rule(p)) // _loop0_119 ) { + D(fprintf(stderr, "%*c+ _gather_118[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_target _loop0_119")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_118[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_target _loop0_119")); } _res = NULL; done: + D(p->level--); return _res; } @@ -18348,15 +22387,19 @@ _gather_118_rule(Parser *p) static void * _tmp_120_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // !'*' star_target if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_120[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "!'*' star_target")); expr_ty star_target_var; if ( _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 16) // token='*' @@ -18364,13 +22407,17 @@ _tmp_120_rule(Parser *p) (star_target_var = star_target_rule(p)) // star_target ) { + D(fprintf(stderr, "%*c+ _tmp_120[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "!'*' star_target")); _res = star_target_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_120[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "!'*' star_target")); } _res = NULL; done: + D(p->level--); return _res; } @@ -18378,7 +22425,9 @@ _tmp_120_rule(Parser *p) static asdl_seq * _loop0_122_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -18388,14 +22437,17 @@ _loop0_122_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' del_target if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_122[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' del_target")); Token * _literal; expr_ty elem; while ( @@ -18408,6 +22460,7 @@ _loop0_122_rule(Parser *p) if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; PyMem_Free(_children); + D(p->level--); return NULL; } if (_n == _children_capacity) { @@ -18416,6 +22469,7 @@ _loop0_122_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; 
PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -18424,17 +22478,21 @@ _loop0_122_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_122[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' del_target")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_122_type, _seq); + D(p->level--); return _seq; } @@ -18442,15 +22500,19 @@ _loop0_122_rule(Parser *p) static asdl_seq * _gather_121_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq * _res = NULL; int _mark = p->mark; { // del_target _loop0_122 if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _gather_121[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "del_target _loop0_122")); expr_ty elem; asdl_seq * seq; if ( @@ -18459,13 +22521,17 @@ _gather_121_rule(Parser *p) (seq = _loop0_122_rule(p)) // _loop0_122 ) { + D(fprintf(stderr, "%*c+ _gather_121[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "del_target _loop0_122")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_121[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "del_target _loop0_122")); } _res = NULL; done: + D(p->level--); return _res; } @@ -18473,7 +22539,9 @@ _gather_121_rule(Parser *p) static asdl_seq * _loop0_124_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -18483,14 +22551,17 @@ _loop0_124_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // ',' target if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_124[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' target")); Token * _literal; expr_ty elem; while ( @@ -18503,6 +22574,7 @@ _loop0_124_rule(Parser *p) if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; PyMem_Free(_children); + D(p->level--); return NULL; } if (_n == _children_capacity) { @@ -18511,6 +22583,7 @@ _loop0_124_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -18519,17 +22592,21 @@ _loop0_124_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_124[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' target")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_124_type, _seq); + D(p->level--); return _seq; } @@ -18537,15 +22614,19 @@ _loop0_124_rule(Parser *p) static asdl_seq * _gather_123_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } asdl_seq * _res = NULL; int _mark = p->mark; { // target _loop0_124 if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _gather_123[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "target _loop0_124")); expr_ty elem; asdl_seq * seq; if ( @@ -18554,13 +22635,17 @@ _gather_123_rule(Parser *p) (seq = _loop0_124_rule(p)) // _loop0_124 ) { + D(fprintf(stderr, "%*c+ _gather_123[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "target _loop0_124")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_123[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "target _loop0_124")); } _res = NULL; done: + D(p->level--); return _res; } @@ -18568,29 +22653,38 @@ _gather_123_rule(Parser *p) static void * _tmp_125_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // args if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_125[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args")); expr_ty args_var; if ( (args_var = args_rule(p)) // args ) { + D(fprintf(stderr, "%*c+ _tmp_125[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args")); _res = args_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_125[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "args")); } { // expression for_if_clauses if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_125[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses")); expr_ty expression_var; asdl_seq* for_if_clauses_var; if ( @@ -18599,13 +22693,17 @@ _tmp_125_rule(Parser *p) (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses ) { + D(fprintf(stderr, "%*c+ _tmp_125[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses")); _res = _PyPegen_dummy_name(p, expression_var, for_if_clauses_var); goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_125[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression for_if_clauses")); } _res = NULL; done: + D(p->level--); return _res; } @@ -18613,7 +22711,9 @@ _tmp_125_rule(Parser *p) static asdl_seq * _loop0_126_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -18623,14 +22723,17 @@ _loop0_126_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // star_named_expressions if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_126[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expressions")); asdl_seq* star_named_expressions_var; while ( (star_named_expressions_var = star_named_expressions_rule(p)) // star_named_expressions @@ -18643,6 +22746,7 @@ _loop0_126_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -18651,17 +22755,21 @@ _loop0_126_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_126[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_named_expressions")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_126_type, _seq); + D(p->level--); return _seq; } @@ -18669,15 +22777,19 @@ _loop0_126_rule(Parser *p) static void * _tmp_127_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // '=' annotated_rhs if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_127[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); Token * _literal; expr_ty annotated_rhs_var; if ( @@ -18686,13 +22798,17 @@ _tmp_127_rule(Parser *p) (annotated_rhs_var = annotated_rhs_rule(p)) // annotated_rhs ) { + D(fprintf(stderr, "%*c+ _tmp_127[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); _res = _PyPegen_dummy_name(p, _literal, annotated_rhs_var); goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_127[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'=' annotated_rhs")); } _res = NULL; done: + D(p->level--); return _res; } @@ -18700,41 +22816,54 @@ _tmp_127_rule(Parser *p) static void * _tmp_128_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // yield_expr if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_128[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { + D(fprintf(stderr, "%*c+ _tmp_128[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); _res = yield_expr_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_128[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr")); } { // star_expressions if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_128[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); expr_ty star_expressions_var; if ( (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { + D(fprintf(stderr, "%*c+ _tmp_128[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); _res = star_expressions_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_128[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_expressions")); } _res = NULL; done: + D(p->level--); return _res; } @@ -18742,41 +22871,54 @@ _tmp_128_rule(Parser *p) static void * _tmp_129_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // yield_expr if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_129[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { + D(fprintf(stderr, "%*c+ _tmp_129[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); _res = yield_expr_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_129[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr")); } { // star_expressions if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_129[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); expr_ty star_expressions_var; if ( (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { + D(fprintf(stderr, "%*c+ _tmp_129[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); _res = star_expressions_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_129[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_expressions")); } _res = NULL; done: + D(p->level--); return _res; } @@ -18784,55 +22926,73 @@ _tmp_129_rule(Parser *p) static void * _tmp_130_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // '[' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_130[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 9)) // token='[' ) { + D(fprintf(stderr, "%*c+ _tmp_130[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); _res = _literal; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_130[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'['")); } { // '(' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_130[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 7)) // token='(' ) { + D(fprintf(stderr, "%*c+ _tmp_130[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('")); _res = _literal; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_130[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'('")); } { // '{' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_130[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 25)) // token='{' ) { + D(fprintf(stderr, "%*c+ _tmp_130[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); _res = _literal; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_130[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'{'")); } _res = NULL; done: + D(p->level--); return _res; } @@ -18840,7 +23000,9 @@ _tmp_130_rule(Parser *p) static asdl_seq * _loop0_131_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -18850,14 +23012,17 @@ _loop0_131_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_no_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop0_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -18870,6 +23035,7 @@ _loop0_131_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -18878,17 +23044,21 @@ _loop0_131_rule(Parser *p) _mark = p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_131[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop0_131_type, _seq); + D(p->level--); return _seq; } @@ -18896,41 +23066,54 @@ _loop0_131_rule(Parser *p) static void * _tmp_132_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // slash_with_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_132[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default")); SlashWithDefault* slash_with_default_var; if ( (slash_with_default_var = slash_with_default_rule(p)) // slash_with_default ) { + D(fprintf(stderr, "%*c+ _tmp_132[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default")); _res = slash_with_default_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_132[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slash_with_default")); } { // param_with_default+ if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_132[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default+")); asdl_seq * _loop1_146_var; if ( (_loop1_146_var = _loop1_146_rule(p)) // param_with_default+ ) { + D(fprintf(stderr, "%*c+ _tmp_132[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_with_default+")); _res = _loop1_146_var; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_132[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_with_default+")); } _res = NULL; done: + D(p->level--); return _res; } @@ -18938,29 +23121,38 @@ _tmp_132_rule(Parser *p) static void * _tmp_133_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // ')' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_133[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { + D(fprintf(stderr, "%*c+ _tmp_133[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); _res = _literal; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_133[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'")); } { // ',' (')' | '**') if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_133[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); Token * _literal; void *_tmp_147_var; if ( @@ -18969,13 +23161,17 @@ _tmp_133_rule(Parser *p) (_tmp_147_var = _tmp_147_rule(p)) // ')' | '**' ) { + D(fprintf(stderr, "%*c+ _tmp_133[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); _res = _PyPegen_dummy_name(p, _literal, _tmp_147_var); goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_133[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' (')' | '**')")); } _res = NULL; done: + D(p->level--); return _res; } @@ -18983,29 +23179,38 @@ _tmp_133_rule(Parser *p) static void * _tmp_134_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // ':' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_134[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { + D(fprintf(stderr, "%*c+ _tmp_134[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_134[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); } { // ',' (':' | '**') if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_134[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); Token * _literal; void *_tmp_148_var; if ( @@ -19014,13 +23219,17 @@ _tmp_134_rule(Parser *p) (_tmp_148_var = _tmp_148_rule(p)) // ':' | '**' ) { + D(fprintf(stderr, "%*c+ _tmp_134[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); _res = _PyPegen_dummy_name(p, _literal, _tmp_148_var); goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_134[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' (':' | '**')")); } _res = NULL; done: + D(p->level--); return _res; } @@ -19028,15 +23237,19 @@ _tmp_134_rule(Parser *p) static void * _tmp_135_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // star_targets '=' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_135[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); Token * _literal; expr_ty z; if ( @@ -19045,17 +23258,22 @@ _tmp_135_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { + D(fprintf(stderr, "%*c+ _tmp_135[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_135[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='")); } _res = NULL; done: + D(p->level--); return _res; } @@ -19063,41 +23281,54 @@ _tmp_135_rule(Parser *p) static void * _tmp_136_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // '.' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_136[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 23)) // token='.' ) { + D(fprintf(stderr, "%*c+ _tmp_136[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); _res = _literal; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_136[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'")); } { // '...' 
if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_136[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 52)) // token='...' ) { + D(fprintf(stderr, "%*c+ _tmp_136[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); _res = _literal; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_136[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'...'")); } _res = NULL; done: + D(p->level--); return _res; } @@ -19105,41 +23336,54 @@ _tmp_136_rule(Parser *p) static void * _tmp_137_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // '.' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 23)) // token='.' ) { + D(fprintf(stderr, "%*c+ _tmp_137[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); _res = _literal; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_137[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'")); } { // '...' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 52)) // token='...' ) { + D(fprintf(stderr, "%*c+ _tmp_137[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); _res = _literal; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_137[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'...'")); } _res = NULL; done: + D(p->level--); return _res; } @@ -19147,15 +23391,19 @@ _tmp_137_rule(Parser *p) static void * _tmp_138_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // '@' named_expression NEWLINE if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_138[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); Token * _literal; expr_ty f; Token * newline_var; @@ -19167,17 +23415,22 @@ _tmp_138_rule(Parser *p) (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { + D(fprintf(stderr, "%*c+ _tmp_138[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); _res = f; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_138[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'@' named_expression NEWLINE")); } _res = NULL; done: + D(p->level--); return _res; } @@ -19185,15 +23438,19 @@ _tmp_138_rule(Parser *p) static void * _tmp_139_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // ',' star_expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_139[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression")); Token * _literal; expr_ty c; if ( @@ -19202,17 +23459,22 @@ _tmp_139_rule(Parser *p) (c = star_expression_rule(p)) // star_expression ) { + D(fprintf(stderr, "%*c+ _tmp_139[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_expression")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_139[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_expression")); } _res = NULL; done: + D(p->level--); return _res; } @@ -19220,15 +23482,19 @@ _tmp_139_rule(Parser *p) static void * _tmp_140_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // ',' expression if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_140[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); Token * _literal; expr_ty c; if ( @@ -19237,17 +23503,22 @@ _tmp_140_rule(Parser *p) (c = expression_rule(p)) // expression ) { + D(fprintf(stderr, "%*c+ _tmp_140[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_140[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' expression")); } _res = NULL; done: + D(p->level--); return _res; } @@ -19255,15 +23526,19 @@ _tmp_140_rule(Parser *p) static void * _tmp_141_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // 'or' conjunction if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_141[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); Token * _keyword; expr_ty c; if ( @@ -19272,17 +23547,22 @@ _tmp_141_rule(Parser *p) (c = conjunction_rule(p)) // conjunction ) { + D(fprintf(stderr, "%*c+ _tmp_141[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_141[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'or' conjunction")); } _res = NULL; done: + D(p->level--); return _res; } @@ -19290,15 +23570,19 @@ _tmp_141_rule(Parser *p) static void * _tmp_142_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // 'and' inversion if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_142[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'and' inversion")); Token * _keyword; expr_ty c; if ( @@ -19307,17 +23591,22 @@ _tmp_142_rule(Parser *p) (c = inversion_rule(p)) // inversion ) { + D(fprintf(stderr, "%*c+ _tmp_142[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'and' inversion")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_142[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'and' inversion")); } _res = NULL; done: + D(p->level--); return _res; } @@ -19325,15 +23614,19 @@ _tmp_142_rule(Parser *p) static void * _tmp_143_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // 'if' disjunction if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_143[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); Token * _keyword; expr_ty z; if ( @@ -19342,17 +23635,22 @@ _tmp_143_rule(Parser *p) (z = disjunction_rule(p)) // disjunction ) { + D(fprintf(stderr, "%*c+ _tmp_143[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_143[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'if' disjunction")); } _res = NULL; done: + D(p->level--); return _res; } @@ -19360,15 +23658,19 @@ _tmp_143_rule(Parser *p) static void * _tmp_144_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // 'if' disjunction if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_144[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); Token * _keyword; expr_ty z; if ( @@ -19377,17 +23679,22 @@ _tmp_144_rule(Parser *p) (z = disjunction_rule(p)) // disjunction ) { + D(fprintf(stderr, "%*c+ _tmp_144[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_144[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'if' disjunction")); } _res = NULL; done: + D(p->level--); return _res; } @@ -19395,15 +23702,19 @@ _tmp_144_rule(Parser *p) static void * _tmp_145_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // ',' star_target if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_145[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); Token * _literal; expr_ty c; if ( @@ -19412,17 +23723,22 @@ _tmp_145_rule(Parser *p) (c = star_target_rule(p)) // star_target ) { + D(fprintf(stderr, "%*c+ _tmp_145[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; + D(p->level--); return NULL; } goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_145[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_target")); } _res = NULL; done: + D(p->level--); return _res; } @@ -19430,7 +23746,9 @@ _tmp_145_rule(Parser *p) static asdl_seq * _loop1_146_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void *_res = NULL; @@ -19440,14 +23758,17 @@ _loop1_146_rule(Parser *p) if (!_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } ssize_t _children_capacity = 1; ssize_t _n = 0; { // param_with_default if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _loop1_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -19460,6 +23781,7 @@ _loop1_146_rule(Parser *p) if (!_new_children) { p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } _children = _new_children; @@ -19468,9 +23790,12 @@ _loop1_146_rule(Parser *p) _mark = 
p->mark; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_146[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default")); } if (_n == 0 || p->error_indicator) { PyMem_Free(_children); + D(p->level--); return NULL; } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -19478,11 +23803,13 @@ _loop1_146_rule(Parser *p) PyMem_Free(_children); p->error_indicator = 1; PyErr_NoMemory(); + D(p->level--); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); _PyPegen_insert_memo(p, _start_mark, _loop1_146_type, _seq); + D(p->level--); return _seq; } @@ -19490,41 +23817,54 @@ _loop1_146_rule(Parser *p) static void * _tmp_147_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // ')' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { + D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); _res = _literal; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'")); } { // '**' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { + D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); _res = _literal; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'**'")); } _res = NULL; done: + D(p->level--); return _res; } @@ -19532,41 +23872,54 @@ _tmp_147_rule(Parser *p) static void * _tmp_148_rule(Parser *p) { + D(p->level++); if (p->error_indicator) { + D(p->level--); return NULL; } void * _res = NULL; int _mark = p->mark; { // ':' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { + D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); } { // '**' if (p->error_indicator) { + D(p->level--); return NULL; } + D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { + D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); _res = _literal; goto done; } p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'**'")); } _res = NULL; done: + D(p->level--); return _res; } diff --git a/Parser/pegen/pegen.c b/Parser/pegen/pegen.c index 9cade2a476de3..cd87a9ffd9365 100644 --- a/Parser/pegen/pegen.c +++ b/Parser/pegen/pegen.c @@ -1027,6 +1027,7 @@ _PyPegen_Parser_New(struct tok_state *tok, int start_rule, int flags, p->flags = flags; p->feature_version = feature_version; p->known_err_token = NULL; + p->level = 0; return p; } diff --git a/Parser/pegen/pegen.h b/Parser/pegen/pegen.h index 761e90f06db8e..bd3056e6f2b80 100644 --- a/Parser/pegen/pegen.h +++ b/Parser/pegen/pegen.h @@ -72,6 +72,7 @@ typedef struct { int feature_version; growable_comment_array type_ignore_comments; Token *known_err_token; + int level; } Parser; typedef struct { diff --git a/Tools/peg_generator/pegen/c_generator.py b/Tools/peg_generator/pegen/c_generator.py index 1249d4f683e26..ff7e75a9fdfda 100644 --- a/Tools/peg_generator/pegen/c_generator.py +++ b/Tools/peg_generator/pegen/c_generator.py @@ -29,6 +29,13 @@ EXTENSION_PREFIX = """\ #include "pegen.h" +#ifdef Py_DEBUG +extern int Py_DebugFlag; +#define D(x) if (Py_DebugFlag) x; +#else +#define D(x) +#endif + """ @@ -300,6 +307,16 @@ def __init__( self.debug = debug self.skip_actions = skip_actions + def add_level(self) -> None: + self.print("D(p->level++);") + + def remove_level(self) -> None: + self.print("D(p->level--);") + + def add_return(self, ret_val: str) -> None: + self.remove_level() + self.print(f"return {ret_val};") + def unique_varname(self, name: str = "tmpvar") -> str: new_var = name + "_" + str(self._varname_counter) self._varname_counter += 1 @@ -310,8 +327,8 @@ def call_with_errorcheck_return(self, call_text: str, returnval: str) -> None: self.print(f"int {error_var} = {call_text};") self.print(f"if ({error_var}) {{") with self.indent(): - self.print(f"return {returnval};") - self.print(f"}}") + self.add_return(returnval) + self.print("}") def call_with_errorcheck_goto(self, call_text: str, goto_target: str) -> 
None: error_var = self.unique_varname() @@ -328,7 +345,7 @@ def out_of_memory_return(self, expr: str, cleanup_code: Optional[str] = None,) - self.print(cleanup_code) self.print("p->error_indicator = 1;") self.print("PyErr_NoMemory();") - self.print("return NULL;") + self.add_return("NULL") self.print(f"}}") def out_of_memory_goto(self, expr: str, goto_target: str) -> None: @@ -415,7 +432,7 @@ def _set_up_token_start_metadata_extraction(self) -> None: self.print("if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) {") with self.indent(): self.print("p->error_indicator = 1;") - self.print("return NULL;") + self.add_return("NULL") self.print("}") self.print("int _start_lineno = p->tokens[_mark]->lineno;") self.print("UNUSED(_start_lineno); // Only used by EXTRA macro") @@ -426,7 +443,7 @@ def _set_up_token_end_metadata_extraction(self) -> None: self.print("Token *_token = _PyPegen_get_last_nonnwhitespace_token(p);") self.print("if (_token == NULL) {") with self.indent(): - self.print("return NULL;") + self.add_return("NULL") self.print("}") self.print("int _end_lineno = _token->end_lineno;") self.print("UNUSED(_end_lineno); // Only used by EXTRA macro") @@ -436,16 +453,18 @@ def _set_up_token_end_metadata_extraction(self) -> None: def _check_for_errors(self) -> None: self.print("if (p->error_indicator) {") with self.indent(): - self.print("return NULL;") + self.add_return("NULL") self.print("}") def _set_up_rule_memoization(self, node: Rule, result_type: str) -> None: self.print("{") with self.indent(): + self.add_level() self.print(f"{result_type} _res = NULL;") - self.print(f"if (_PyPegen_is_memoized(p, {node.name}_type, &_res))") + self.print(f"if (_PyPegen_is_memoized(p, {node.name}_type, &_res)) {{") with self.indent(): - self.print("return _res;") + self.add_return("_res") + self.print("}") self.print("int _mark = p->mark;") self.print("int _resmark = p->mark;") self.print("while (1) {") @@ -462,7 +481,7 @@ def _set_up_rule_memoization(self, node: Rule, 
result_type: str) -> None: self.print("_res = _raw;") self.print("}") self.print(f"p->mark = _resmark;") - self.print("return _res;") + self.add_return("_res") self.print("}") self.print(f"static {result_type}") self.print(f"{node.name}_raw(Parser *p)") @@ -474,12 +493,14 @@ def _handle_default_rule_body(self, node: Rule, rhs: Rhs, result_type: str) -> N memoize = self._should_memoize(node) with self.indent(): + self.add_level() self._check_for_errors() self.print(f"{result_type} _res = NULL;") if memoize: - self.print(f"if (_PyPegen_is_memoized(p, {node.name}_type, &_res))") + self.print(f"if (_PyPegen_is_memoized(p, {node.name}_type, &_res)) {{") with self.indent(): - self.print("return _res;") + self.add_return("_res") + self.print("}") self.print("int _mark = p->mark;") if any(alt.action and "EXTRA" in alt.action for alt in rhs.alts): self._set_up_token_start_metadata_extraction() @@ -487,25 +508,27 @@ def _handle_default_rule_body(self, node: Rule, rhs: Rhs, result_type: str) -> N rhs, is_loop=False, is_gather=node.is_gather(), rulename=node.name, ) if self.debug: - self.print(f'fprintf(stderr, "Fail at %d: {node.name}\\n", p->mark);') + self.print(f'D(fprintf(stderr, "Fail at %d: {node.name}\\n", p->mark));') self.print("_res = NULL;") self.print(" done:") with self.indent(): if memoize: self.print(f"_PyPegen_insert_memo(p, _mark, {node.name}_type, _res);") - self.print("return _res;") + self.add_return("_res") def _handle_loop_rule_body(self, node: Rule, rhs: Rhs) -> None: memoize = self._should_memoize(node) is_repeat1 = node.name.startswith("_loop1") with self.indent(): + self.add_level() self._check_for_errors() self.print("void *_res = NULL;") if memoize: - self.print(f"if (_PyPegen_is_memoized(p, {node.name}_type, &_res))") + self.print(f"if (_PyPegen_is_memoized(p, {node.name}_type, &_res)) {{") with self.indent(): - self.print("return _res;") + self.add_return("_res") + self.print("}") self.print("int _mark = p->mark;") self.print("int _start_mark = 
p->mark;") self.print("void **_children = PyMem_Malloc(sizeof(void *));") @@ -521,7 +544,7 @@ def _handle_loop_rule_body(self, node: Rule, rhs: Rhs) -> None: self.print("if (_n == 0 || p->error_indicator) {") with self.indent(): self.print("PyMem_Free(_children);") - self.print("return NULL;") + self.add_return("NULL") self.print("}") self.print("asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena);") self.out_of_memory_return(f"!_seq", cleanup_code="PyMem_Free(_children);") @@ -529,7 +552,7 @@ def _handle_loop_rule_body(self, node: Rule, rhs: Rhs) -> None: self.print("PyMem_Free(_children);") if node.name: self.print(f"_PyPegen_insert_memo(p, _start_mark, {node.name}_type, _seq);") - self.print("return _seq;") + self.add_return("_seq") def visit_Rule(self, node: Rule) -> None: is_loop = node.is_loop() @@ -594,12 +617,12 @@ def emit_action(self, node: Alt, cleanup_code: Optional[str] = None) -> None: self.print("p->error_indicator = 1;") if cleanup_code: self.print(cleanup_code) - self.print("return NULL;") + self.add_return("NULL") self.print("}") if self.debug: self.print( - f'fprintf(stderr, "Hit with action [%d-%d]: %s\\n", _mark, p->mark, "{node}");' + f'D(fprintf(stderr, "Hit with action [%d-%d]: %s\\n", _mark, p->mark, "{node}"));' ) def emit_default_action(self, is_gather: bool, node: Alt) -> None: @@ -613,7 +636,7 @@ def emit_default_action(self, is_gather: bool, node: Alt) -> None: else: if self.debug: self.print( - f'fprintf(stderr, "Hit without action [%d:%d]: %s\\n", _mark, p->mark, "{node}");' + f'D(fprintf(stderr, "Hit without action [%d:%d]: %s\\n", _mark, p->mark, "{node}"));' ) self.print( f"_res = _PyPegen_dummy_name(p, {', '.join(self.local_variable_names)});" @@ -621,18 +644,21 @@ def emit_default_action(self, is_gather: bool, node: Alt) -> None: else: if self.debug: self.print( - f'fprintf(stderr, "Hit with default action [%d:%d]: %s\\n", _mark, p->mark, "{node}");' + f'D(fprintf(stderr, "Hit with default action [%d:%d]: %s\\n", _mark, p->mark, 
"{node}"));' ) self.print(f"_res = {self.local_variable_names[0]};") def emit_dummy_action(self) -> None: self.print("_res = _PyPegen_dummy_name(p);") - def handle_alt_normal(self, node: Alt, is_gather: bool) -> None: + def handle_alt_normal(self, node: Alt, is_gather: bool, rulename: Optional[str]) -> None: self.join_conditions(keyword="if", node=node) self.print("{") # We have parsed successfully all the conditions for the option. with self.indent(): + self.print( + f'D(fprintf(stderr, "%*c+ {rulename}[%d-%d]: %s succeeded!\\n", p->level, \' \', _mark, p->mark, "{node}"));' + ) # Prepare to emmit the rule action and do so if node.action and "EXTRA" in node.action: self._set_up_token_end_metadata_extraction() @@ -684,6 +710,9 @@ def visit_Alt( self.print(f"{{ // {node}") with self.indent(): self._check_for_errors() + self.print( + f'D(fprintf(stderr, "%*c> {rulename}[%d-%d]: %s\\n", p->level, \' \', _mark, p->mark, "{node}"));' + ) # Prepare variable declarations for the alternative vars = self.collect_vars(node) for v, var_type in sorted(item for item in vars.items() if item[0] is not None): @@ -701,11 +730,18 @@ def visit_Alt( if is_loop: self.handle_alt_loop(node, is_gather, rulename) else: - self.handle_alt_normal(node, is_gather) + self.handle_alt_normal(node, is_gather, rulename) self.print("p->mark = _mark;") + self.print( + f"D(fprintf(stderr, \"%*c%s {rulename}[%d-%d]: %s failed!\\n\", p->level, ' ',\n" + f' p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "{node}"));' + ) if "_cut_var" in vars: - self.print("if (_cut_var) return NULL;") + self.print("if (_cut_var) {") + with self.indent(): + self.add_return("NULL") + self.print("}") self.print("}") def collect_vars(self, node: Alt) -> Dict[Optional[str], Optional[str]]: diff --git a/Tools/peg_generator/pegen/testutil.py b/Tools/peg_generator/pegen/testutil.py index 264659e71768c..920d24657c5ed 100644 --- a/Tools/peg_generator/pegen/testutil.py +++ b/Tools/peg_generator/pegen/testutil.py @@ -96,7 +96,7 @@ def generate_parser_c_extension( # context. assert not os.listdir(path) source = path / "parse.c" - with open(source, "w") as file: + with open(source, "w", encoding="utf-8") as file: genr = CParserGenerator( grammar, ALL_TOKENS, EXACT_TOKENS, NON_EXACT_TOKENS, file, debug=debug ) From webhook-mailer at python.org Mon May 25 15:17:20 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Mon, 25 May 2020 19:17:20 -0000 Subject: [Python-checkins] bpo-40750: Do not expand the new parser debug flags if Py_BUILD_CORE is not defined (GH-20393) Message-ID: https://github.com/python/cpython/commit/deb4355a37e41edf1199920789fe9572c1fb43c2 commit: deb4355a37e41edf1199920789fe9572c1fb43c2 branch: master author: Pablo Galindo committer: GitHub date: 2020-05-25T20:17:12+01:00 summary: bpo-40750: Do not expand the new parser debug flags if Py_BUILD_CORE is not defined (GH-20393) files: M Parser/pegen/parse.c M Tools/peg_generator/pegen/c_generator.py diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index 2b735472ad620..b63924177d400 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -1,7 +1,7 @@ // @generated by pegen.py from ./Grammar/python.gram #include "pegen.h" -#ifdef Py_DEBUG +#if defined(Py_DEBUG) && defined(Py_BUILD_CORE) extern int Py_DebugFlag; #define D(x) if (Py_DebugFlag) x; #else diff --git a/Tools/peg_generator/pegen/c_generator.py b/Tools/peg_generator/pegen/c_generator.py index ff7e75a9fdfda..8bc23911bbbc2 
100644 --- a/Tools/peg_generator/pegen/c_generator.py +++ b/Tools/peg_generator/pegen/c_generator.py @@ -29,7 +29,7 @@ EXTENSION_PREFIX = """\ #include "pegen.h" -#ifdef Py_DEBUG +#if defined(Py_DEBUG) && defined(Py_BUILD_CORE) extern int Py_DebugFlag; #define D(x) if (Py_DebugFlag) x; #else From webhook-mailer at python.org Mon May 25 15:19:47 2020 From: webhook-mailer at python.org (Hai Shi) Date: Mon, 25 May 2020 19:19:47 -0000 Subject: [Python-checkins] Closes bpo-40558: update CONTRIBUTING.rst to reflect current branches (GH-19989) Message-ID: https://github.com/python/cpython/commit/2377a9bae3f698efaa81ff0426d0feb14c9f6329 commit: 2377a9bae3f698efaa81ff0426d0feb14c9f6329 branch: master author: Hai Shi committer: GitHub date: 2020-05-25T14:19:42-05:00 summary: Closes bpo-40558: update CONTRIBUTING.rst to reflect current branches (GH-19989) files: M .github/CONTRIBUTING.rst diff --git a/.github/CONTRIBUTING.rst b/.github/CONTRIBUTING.rst index 7f912e8708419..a81935d3c9da7 100644 --- a/.github/CONTRIBUTING.rst +++ b/.github/CONTRIBUTING.rst @@ -8,17 +8,17 @@ Build Status + `Stable buildbots `_ -- 3.7 +- 3.9 - + `Stable buildbots `_ + + `Stable buildbots `_ -- 3.6 +- 3.8 - + `Stable buildbots `_ + + `Stable buildbots `_ -- 2.7 +- 3.7 - + `Stable buildbots `_ + + `Stable buildbots `_ Thank You From webhook-mailer at python.org Mon May 25 15:42:33 2020 From: webhook-mailer at python.org (Rotuna) Date: Mon, 25 May 2020 19:42:33 -0000 Subject: [Python-checkins] bpo-23082: Better error message for PurePath.relative_to() from pathlib (GH-19611) Message-ID: https://github.com/python/cpython/commit/448325369ff73011d34d6c3a493014fe3ead8843 commit: 448325369ff73011d34d6c3a493014fe3ead8843 branch: master author: Rotuna committer: GitHub date: 2020-05-25T20:42:28+01:00 summary: bpo-23082: Better error message for PurePath.relative_to() from pathlib (GH-19611) Co-authored-by: Sadhana Srinivasan files: A Misc/NEWS.d/next/Library/2020-04-20-22-08-36.bpo-23082.iX90Id.rst M 
Doc/library/pathlib.rst M Lib/pathlib.py diff --git a/Doc/library/pathlib.rst b/Doc/library/pathlib.rst index 83f7c836f0e71..bf6fee44df2c8 100644 --- a/Doc/library/pathlib.rst +++ b/Doc/library/pathlib.rst @@ -551,7 +551,9 @@ Pure paths provide the following methods and properties: File "", line 1, in File "pathlib.py", line 694, in relative_to .format(str(self), str(formatted))) - ValueError: '/etc/passwd' does not start with '/usr' + ValueError: '/etc/passwd' is not in the subpath of '/usr' OR one path is relative and the other absolute. + + NOTE: This function is part of :class:`PurePath` and works with strings. It does not check or access the underlying file structure. .. method:: PurePath.with_name(name) diff --git a/Lib/pathlib.py b/Lib/pathlib.py index f98d69eb04ac3..9f5e27b91178e 100644 --- a/Lib/pathlib.py +++ b/Lib/pathlib.py @@ -922,7 +922,8 @@ def relative_to(self, *other): cf = self._flavour.casefold_parts if (root or drv) if n == 0 else cf(abs_parts[:n]) != cf(to_abs_parts): formatted = self._format_parsed_parts(to_drv, to_root, to_parts) - raise ValueError("{!r} does not start with {!r}" + raise ValueError("{!r} is not in the subpath of {!r}" + " OR one path is relative and the other is absolute." .format(str(self), str(formatted))) return self._from_parsed_parts('', root if n == 1 else '', abs_parts[n:]) diff --git a/Misc/NEWS.d/next/Library/2020-04-20-22-08-36.bpo-23082.iX90Id.rst b/Misc/NEWS.d/next/Library/2020-04-20-22-08-36.bpo-23082.iX90Id.rst new file mode 100644 index 0000000000000..13ed0defe529c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-04-20-22-08-36.bpo-23082.iX90Id.rst @@ -0,0 +1 @@ +Updated the error message and docs of PurePath.relative_to() to better reflect the function behaviour. 
\ No newline at end of file From webhook-mailer at python.org Mon May 25 15:52:05 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Mon, 25 May 2020 19:52:05 -0000 Subject: [Python-checkins] bpo-40688: Use the correct parser in the peg_generator scripts (GH-20235) Message-ID: https://github.com/python/cpython/commit/9645930b5bc1833ef495891d22052d1ba65ab7ea commit: 9645930b5bc1833ef495891d22052d1ba65ab7ea branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-05-25T20:51:58+01:00 summary: bpo-40688: Use the correct parser in the peg_generator scripts (GH-20235) The scripts in `Tools/peg_generator/scripts` mostly assume that `ast.parse` and `compile` use the old parser, since this was the state of things, while we were developing them. They need to be updated to always use the correct parser. `_peg_parser` is being extended to support both parsing and compiling with both parsers. files: M Modules/_peg_parser.c M Tools/peg_generator/Makefile M Tools/peg_generator/scripts/benchmark.py M Tools/peg_generator/scripts/show_parse.py M Tools/peg_generator/scripts/test_parse_directory.py M Tools/peg_generator/scripts/test_pypi_packages.py diff --git a/Modules/_peg_parser.c b/Modules/_peg_parser.c index 3b27b2c9cbaa2..b66d5a83a84f6 100644 --- a/Modules/_peg_parser.c +++ b/Modules/_peg_parser.c @@ -1,104 +1,133 @@ #include #include "pegen_interface.h" -PyObject * -_Py_parse_file(PyObject *self, PyObject *args, PyObject *kwds) +static int +_mode_str_to_int(char *mode_str) { - static char *keywords[] = {"file", "mode", NULL}; - char *filename; - char *mode_str = "exec"; - - if (!PyArg_ParseTupleAndKeywords(args, kwds, "s|s", keywords, &filename, &mode_str)) { - return NULL; - } - int mode; if (strcmp(mode_str, "exec") == 0) { mode = Py_file_input; } + else if (strcmp(mode_str, "eval") == 0) { + mode = Py_eval_input; + } else if (strcmp(mode_str, "single") == 0) { mode = Py_single_input; } else { - return PyErr_Format(PyExc_ValueError, "mode must 
be either 'exec' or 'single'"); + mode = -1; } + return mode; +} - PyArena *arena = PyArena_New(); - if (arena == NULL) { +static mod_ty +_run_parser(char *str, char *filename, int mode, PyCompilerFlags *flags, PyArena *arena, int oldparser) +{ + mod_ty mod; + if (!oldparser) { + mod = PyPegen_ASTFromString(str, filename, mode, flags, arena); + } + else { + mod = PyParser_ASTFromString(str, filename, mode, flags, arena); + } + return mod; +} + +PyObject * +_Py_compile_string(PyObject *self, PyObject *args, PyObject *kwds) +{ + static char *keywords[] = {"string", "filename", "mode", "oldparser", NULL}; + char *the_string; + char *filename = ""; + char *mode_str = "exec"; + int oldparser = 0; + + if (!PyArg_ParseTupleAndKeywords(args, kwds, "s|ssp", keywords, + &the_string, &filename, &mode_str, &oldparser)) { return NULL; } + int mode = _mode_str_to_int(mode_str); + if (mode == -1) { + return PyErr_Format(PyExc_ValueError, "mode must be either 'exec' or 'eval' or 'single'"); + } + PyCompilerFlags flags = _PyCompilerFlags_INIT; - PyObject *result = NULL; + flags.cf_flags = PyCF_IGNORE_COOKIE; - mod_ty res = PyPegen_ASTFromFilename(filename, mode, &flags, arena); - if (res == NULL) { - goto error; + PyArena *arena = PyArena_New(); + if (arena == NULL) { + return NULL; + } + + mod_ty mod = _run_parser(the_string, filename, mode, &flags, arena, oldparser); + if (mod == NULL) { + PyArena_Free(arena); + return NULL; } - result = PyAST_mod2obj(res); -error: + PyObject *filename_ob = PyUnicode_DecodeFSDefault(filename); + if (filename_ob == NULL) { + PyArena_Free(arena); + return NULL; + } + PyCodeObject *result = PyAST_CompileObject(mod, filename_ob, &flags, -1, arena); + Py_XDECREF(filename_ob); PyArena_Free(arena); - return result; + return (PyObject *)result; } PyObject * _Py_parse_string(PyObject *self, PyObject *args, PyObject *kwds) { - static char *keywords[] = {"string", "mode", "oldparser", NULL}; + static char *keywords[] = {"string", "filename", "mode", 
"oldparser", NULL}; char *the_string; + char *filename = ""; char *mode_str = "exec"; int oldparser = 0; - if (!PyArg_ParseTupleAndKeywords(args, kwds, "s|sp", keywords, - &the_string, &mode_str, &oldparser)) { + if (!PyArg_ParseTupleAndKeywords(args, kwds, "s|ssp", keywords, + &the_string, &filename, &mode_str, &oldparser)) { return NULL; } - int mode; - if (strcmp(mode_str, "exec") == 0) { - mode = Py_file_input; - } - else if (strcmp(mode_str, "eval") == 0) { - mode = Py_eval_input; - } - else if (strcmp(mode_str, "single") == 0) { - mode = Py_single_input; - } - else { + int mode = _mode_str_to_int(mode_str); + if (mode == -1) { return PyErr_Format(PyExc_ValueError, "mode must be either 'exec' or 'eval' or 'single'"); } + PyCompilerFlags flags = _PyCompilerFlags_INIT; + flags.cf_flags = PyCF_IGNORE_COOKIE; + PyArena *arena = PyArena_New(); if (arena == NULL) { return NULL; } - PyObject *result = NULL; - - PyCompilerFlags flags = _PyCompilerFlags_INIT; - flags.cf_flags = PyCF_IGNORE_COOKIE; - - mod_ty res; - if (oldparser) { - res = PyParser_ASTFromString(the_string, "", mode, &flags, arena); - } - else { - res = PyPegen_ASTFromString(the_string, "", mode, &flags, arena); - } - if (res == NULL) { - goto error; + mod_ty mod = _run_parser(the_string, filename, mode, &flags, arena, oldparser); + if (mod == NULL) { + PyArena_Free(arena); + return NULL; } - result = PyAST_mod2obj(res); -error: + PyObject *result = PyAST_mod2obj(mod); PyArena_Free(arena); return result; } static PyMethodDef ParseMethods[] = { - {"parse_file", (PyCFunction)(void (*)(void))_Py_parse_file, METH_VARARGS|METH_KEYWORDS, "Parse a file."}, - {"parse_string", (PyCFunction)(void (*)(void))_Py_parse_string, METH_VARARGS|METH_KEYWORDS,"Parse a string."}, + { + "parse_string", + (PyCFunction)(void (*)(void))_Py_parse_string, + METH_VARARGS|METH_KEYWORDS, + "Parse a string, return an AST." 
+ }, + { + "compile_string", + (PyCFunction)(void (*)(void))_Py_compile_string, + METH_VARARGS|METH_KEYWORDS, + "Compile a string, return a code object." + }, {NULL, NULL, 0, NULL} /* Sentinel */ }; diff --git a/Tools/peg_generator/Makefile b/Tools/peg_generator/Makefile index 34763b543c23b..e7a190c1bcd13 100644 --- a/Tools/peg_generator/Makefile +++ b/Tools/peg_generator/Makefile @@ -69,25 +69,22 @@ stats: peg_extension/parse.c data/xxl.py time: time_compile -time_compile: venv peg_extension/parse.c data/xxl.py +time_compile: venv data/xxl.py $(VENVPYTHON) scripts/benchmark.py --parser=pegen --target=xxl compile -time_parse: venv peg_extension/parse.c data/xxl.py +time_parse: venv data/xxl.py $(VENVPYTHON) scripts/benchmark.py --parser=pegen --target=xxl parse -time_check: venv peg_extension/parse.c data/xxl.py - $(VENVPYTHON) scripts/benchmark.py --parser=pegen --target=xxl check +time_old: time_old_compile -time_stdlib: time_stdlib_compile - -time_stdlib_compile: venv peg_extension/parse.c data/xxl.py +time_old_compile: venv data/xxl.py $(VENVPYTHON) scripts/benchmark.py --parser=cpython --target=xxl compile -time_stdlib_parse: venv peg_extension/parse.c data/xxl.py +time_old_parse: venv data/xxl.py $(VENVPYTHON) scripts/benchmark.py --parser=cpython --target=xxl parse -test_local: - $(PYTHON) scripts/test_parse_directory.py \ +time_peg_dir: venv + $(VENVPYTHON) scripts/test_parse_directory.py \ --grammar-file $(GRAMMAR) \ --tokens-file $(TOKENS) \ -d $(TESTDIR) \ @@ -96,8 +93,8 @@ test_local: --exclude "*/failset/**" \ --exclude "*/failset/**/*" -test_global: $(CPYTHON) - $(PYTHON) scripts/test_parse_directory.py \ +time_stdlib: $(CPYTHON) venv + $(VENVPYTHON) scripts/test_parse_directory.py \ --grammar-file $(GRAMMAR) \ --tokens-file $(TOKENS) \ -d $(CPYTHON) \ @@ -113,9 +110,6 @@ mypy: regen-metaparser format-python: black pegen scripts -bench: venv - $(VENVPYTHON) scripts/benchmark.py --parser=pegen --target=stdlib check - format: format-python 
find_max_nesting: diff --git a/Tools/peg_generator/scripts/benchmark.py b/Tools/peg_generator/scripts/benchmark.py index 4942b99b6619f..71512c22a355b 100644 --- a/Tools/peg_generator/scripts/benchmark.py +++ b/Tools/peg_generator/scripts/benchmark.py @@ -6,6 +6,8 @@ import os from time import time +import _peg_parser + try: import memory_profiler except ModuleNotFoundError: @@ -14,8 +16,6 @@ sys.exit(1) sys.path.insert(0, os.getcwd()) -from peg_extension import parse -from pegen.build import build_c_parser_and_generator from scripts.test_parse_directory import parse_directory argparser = argparse.ArgumentParser( @@ -41,9 +41,6 @@ "compile", help="Benchmark parsing and compiling to bytecode" ) command_parse = subcommands.add_parser("parse", help="Benchmark parsing and generating an ast.AST") -command_check = subcommands.add_parser( - "check", help="Benchmark parsing and throwing the tree away" -) def benchmark(func): @@ -66,22 +63,20 @@ def wrapper(*args): @benchmark def time_compile(source, parser): if parser == "cpython": - return compile(source, os.path.join("data", "xxl.py"), "exec") + return _peg_parser.compile_string( + source, + oldparser=True, + ) else: - return parse.parse_string(source, mode=2) + return _peg_parser.compile_string(source) @benchmark def time_parse(source, parser): if parser == "cpython": - return ast.parse(source, os.path.join("data", "xxl.py"), "exec") + return _peg_parser.parse_string(source, oldparser=True) else: - return parse.parse_string(source, mode=1) - - - at benchmark -def time_check(source): - return parse.parse_string(source, mode=0) + return _peg_parser.parse_string(source) def run_benchmark_xxl(subcommand, parser, source): @@ -89,32 +84,20 @@ def run_benchmark_xxl(subcommand, parser, source): time_compile(source, parser) elif subcommand == "parse": time_parse(source, parser) - elif subcommand == "check": - time_check(source) def run_benchmark_stdlib(subcommand, parser): - modes = {"compile": 2, "parse": 1, "check": 0} - 
extension = None - if parser == "pegen": - extension = build_c_parser_and_generator( - "../../Grammar/python.gram", - "../../Grammar/Tokens", - "peg_extension/parse.c", - compile_extension=True, - skip_actions=False, - ) for _ in range(3): parse_directory( "../../Lib", "../../Grammar/python.gram", + "../../Grammar/Tokens", verbose=False, excluded_files=["*/bad*", "*/lib2to3/tests/data/*",], skip_actions=False, tree_arg=0, short=True, - extension=extension, - mode=modes[subcommand], + mode=2 if subcommand == "compile" else 1, parser=parser, ) @@ -127,8 +110,6 @@ def main(): if subcommand is None: argparser.error("A benchmark to run is required") - if subcommand == "check" and parser == "cpython": - argparser.error("Cannot use check target with the CPython parser") if target == "xxl": with open(os.path.join("data", "xxl.py"), "r") as f: diff --git a/Tools/peg_generator/scripts/show_parse.py b/Tools/peg_generator/scripts/show_parse.py index 1a0410e1bac8f..1c1996f40f74e 100755 --- a/Tools/peg_generator/scripts/show_parse.py +++ b/Tools/peg_generator/scripts/show_parse.py @@ -30,6 +30,8 @@ import sys import tempfile +import _peg_parser + from typing import List sys.path.insert(0, os.getcwd()) @@ -72,7 +74,7 @@ def diff_trees(a: ast.AST, b: ast.AST, verbose: bool = False) -> List[str]: def show_parse(source: str, verbose: bool = False) -> str: - tree = ast.parse(source) + tree = _peg_parser.parse_string(source, oldparser=True) return format_tree(tree, verbose).rstrip("\n") @@ -90,17 +92,11 @@ def main() -> None: sep = " " program = sep.join(args.program) if args.grammar_file: - sys.path.insert(0, os.curdir) - from pegen.build import build_parser_and_generator - - build_parser_and_generator(args.grammar_file, "peg_parser/parse.c", compile_extension=True) - from pegen.parse import parse_string # type: ignore[import] - - tree = parse_string(program, mode=1) + tree = _peg_parser.parse_string(program) if args.diff: a = tree - b = ast.parse(program) + b = 
_peg_parser.parse_string(program, oldparser=True) diff = diff_trees(a, b, args.verbose) if diff: for line in diff: @@ -111,8 +107,8 @@ def main() -> None: print(f"# Parsed using {args.grammar_file}") print(format_tree(tree, args.verbose)) else: - tree = ast.parse(program) - print("# Parse using ast.parse()") + tree = _peg_parser.parse_string(program, oldparser=True) + print("# Parse using the old parser") print(format_tree(tree, args.verbose)) diff --git a/Tools/peg_generator/scripts/test_parse_directory.py b/Tools/peg_generator/scripts/test_parse_directory.py index aef9c74b52881..e88afe1539ce1 100755 --- a/Tools/peg_generator/scripts/test_parse_directory.py +++ b/Tools/peg_generator/scripts/test_parse_directory.py @@ -6,13 +6,14 @@ import sys import time import traceback +import tokenize +import _peg_parser from glob import glob from pathlib import PurePath from typing import List, Optional, Any sys.path.insert(0, os.getcwd()) -from pegen.build import build_c_parser_and_generator from pegen.ast_dump import ast_dump from pegen.testutil import print_memstats from scripts import show_parse @@ -83,7 +84,7 @@ def compare_trees( actual_tree: ast.AST, file: str, verbose: bool, include_attributes: bool = False, ) -> int: with open(file) as f: - expected_tree = ast.parse(f.read()) + expected_tree = _peg_parser.parse_string(f.read(), oldparser=True) expected_text = ast_dump(expected_tree, include_attributes=include_attributes) actual_text = ast_dump(actual_tree, include_attributes=include_attributes) @@ -121,7 +122,6 @@ def parse_directory( skip_actions: bool, tree_arg: int, short: bool, - extension: Any, mode: int, parser: str, ) -> int: @@ -137,47 +137,21 @@ def parse_directory( if not os.path.exists(grammar_file): print(f"The specified grammar file, {grammar_file}, does not exist.", file=sys.stderr) return 1 - - try: - if not extension and parser == "pegen": - build_c_parser_and_generator( - grammar_file, - tokens_file, - "peg_extension/parse.c", - 
compile_extension=True, - skip_actions=skip_actions, - ) - except Exception as err: - print( - f"{FAIL}The following error occurred when generating the parser. Please check your grammar file.\n{ENDC}", - file=sys.stderr, - ) - traceback.print_exception(err.__class__, err, None) - - return 1 - else: print( "A grammar file or a tokens file was not provided - attempting to use existing parser from stdlib...\n" ) - if parser == "pegen": - try: - from peg_extension import parse # type: ignore - except Exception as e: - print( - "An existing parser was not found. Please run `make` or specify a grammar file with the `-g` flag.", - file=sys.stderr, - ) - return 1 + if tree_arg: + assert mode == 1, "Mode should be 1 (parse), when comparing the generated trees" # For a given directory, traverse files and attempt to parse each one # - Output success/failure for each file errors = 0 files = [] trees = {} # Trees to compare (after everything else is done) + total_seconds = 0 - t0 = time.time() for file in sorted(glob(f"{directory}/**/*.py", recursive=True)): # Only attempt to parse Python files and files that are not excluded should_exclude_file = False @@ -187,25 +161,31 @@ def parse_directory( break if not should_exclude_file: + with tokenize.open(file) as f: + source = f.read() try: - if tree_arg: - mode = 1 - if parser == "cpython": - with open(file, "r") as f: - source = f.read() - if mode == 2: - compile(source, file, "exec") - elif mode == 1: - ast.parse(source, file, "exec") + t0 = time.time() + if mode == 2: + result = _peg_parser.compile_string( + source, + filename=file, + oldparser=parser == "cpython", + ) else: - tree = parse.parse_file(file, mode=mode) + result = _peg_parser.parse_string( + source, + filename=file, + oldparser=parser == "cpython" + ) + t1 = time.time() + total_seconds += (t1 - t0) if tree_arg: - trees[file] = tree + trees[file] = result if not short: report_status(succeeded=True, file=file, verbose=verbose) except Exception as error: try: - 
ast.parse(file) + _peg_parser.parse_string(source, mode="exec", oldparser=True) except Exception: if not short: print(f"File {file} cannot be parsed by either pegen or the ast module.") @@ -217,7 +197,6 @@ def parse_directory( files.append(file) t1 = time.time() - total_seconds = t1 - t0 total_files = len(files) total_bytes = 0 @@ -238,13 +217,6 @@ def parse_directory( f"or {total_bytes / total_seconds :,.0f} bytes/sec.", ) - if parser == "pegen": - # Dump memo stats to @data. - with open("@data", "w") as datafile: - for i, count in enumerate(parse.get_memo_stats()): - if count: - datafile.write(f"{i:4d} {count:9d}\n") - if short: print_memstats() @@ -275,6 +247,7 @@ def main() -> None: skip_actions = args.skip_actions tree = args.tree short = args.short + mode = 1 if args.tree else 2 sys.exit( parse_directory( directory, @@ -285,8 +258,7 @@ def main() -> None: skip_actions, tree, short, - None, - 0, + mode, "pegen", ) ) diff --git a/Tools/peg_generator/scripts/test_pypi_packages.py b/Tools/peg_generator/scripts/test_pypi_packages.py index 7586b1a21fa6d..98f77785cdd1c 100755 --- a/Tools/peg_generator/scripts/test_pypi_packages.py +++ b/Tools/peg_generator/scripts/test_pypi_packages.py @@ -54,7 +54,7 @@ def find_dirname(package_name: str) -> str: assert False # This is to fix mypy, should never be reached -def run_tests(dirname: str, tree: int, extension: Any) -> int: +def run_tests(dirname: str, tree: int) -> int: return test_parse_directory.parse_directory( dirname, HERE / ".." / ".." / ".." / "Grammar" / "python.gram", @@ -72,7 +72,6 @@ def run_tests(dirname: str, tree: int, extension: Any) -> int: skip_actions=False, tree_arg=tree, short=True, - extension=extension, mode=1, parser="pegen", ) @@ -82,13 +81,6 @@ def main() -> None: args = argparser.parse_args() tree = args.tree - extension = build.build_c_parser_and_generator( - HERE / ".." / ".." / ".." / "Grammar" / "python.gram", - HERE / ".." / ".." / ".." 
/ "Grammar" / "Tokens", - "peg_extension/parse.c", - compile_extension=True, - ) - for package in get_packages(): print(f"Extracting files from {package}... ", end="") try: @@ -100,7 +92,7 @@ def main() -> None: print(f"Trying to parse all python files ... ") dirname = find_dirname(package) - status = run_tests(dirname, tree, extension) + status = run_tests(dirname, tree) if status == 0: shutil.rmtree(dirname) else: From webhook-mailer at python.org Mon May 25 17:32:31 2020 From: webhook-mailer at python.org (Ammar Askar) Date: Mon, 25 May 2020 21:32:31 -0000 Subject: [Python-checkins] [workflow] Use Sphinx problem matcher on GitHub docs builds (GH-20325) Message-ID: https://github.com/python/cpython/commit/2602d97a0ae92b2d320909024e901c202b003e14 commit: 2602d97a0ae92b2d320909024e901c202b003e14 branch: master author: Ammar Askar committer: GitHub date: 2020-05-25T22:32:24+01:00 summary: [workflow] Use Sphinx problem matcher on GitHub docs builds (GH-20325) files: A .github/problem-matchers/sphinx.json M .github/workflows/doc.yml diff --git a/.github/problem-matchers/sphinx.json b/.github/problem-matchers/sphinx.json new file mode 100644 index 0000000000000..228415f5b7b03 --- /dev/null +++ b/.github/problem-matchers/sphinx.json @@ -0,0 +1,40 @@ +{ + "problemMatcher": [ + { + "owner": "sphinx-problem-matcher", + "pattern": [ + { + "regexp": "^(.*):(\\d+):\\s+(\\w*):\\s+(.*)$", + "file": 1, + "line": 2, + "severity": 3, + "message": 4 + } + ] + }, + { + "owner": "sphinx-problem-matcher-loose", + "pattern": [ + { + "_comment": "A bit of a looser pattern, doesn't look for line numbers, just looks for file names relying on them to start with / and end with .rst", + "regexp": "(\/.*\\.rst):\\s+(\\w*):\\s+(.*)$", + "file": 1, + "severity": 2, + "message": 3 + } + ] + }, + { + "owner": "sphinx-problem-matcher-loose-no-severity", + "pattern": [ + { + "_comment": "Looks for file names ending with .rst and line numbers but without severity", + "regexp": 
"^(.*\\.rst):(\\d+):(.*)$", + "file": 1, + "line": 2, + "message": 3 + } + ] + } + ] +} \ No newline at end of file diff --git a/.github/workflows/doc.yml b/.github/workflows/doc.yml index d481ea279d796..88c9cbd797ab2 100644 --- a/.github/workflows/doc.yml +++ b/.github/workflows/doc.yml @@ -25,6 +25,8 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout at v2 + - name: Register Sphinx problem matcher + run: echo "::add-matcher::.github/problem-matchers/sphinx.json" - name: 'Install Dependencies' run: sudo ./.github/workflows/posix-deps-apt.sh && sudo apt-get install wamerican - name: 'Configure CPython' @@ -34,7 +36,7 @@ jobs: - name: 'Install build dependencies' run: make -C Doc/ PYTHON=../python venv - name: 'Build documentation' - run: xvfb-run make -C Doc/ PYTHON=../python SPHINXOPTS="-q -W -j4" doctest suspicious html + run: xvfb-run make -C Doc/ PYTHON=../python SPHINXOPTS="-q -W --keep-going -j4" doctest suspicious html - name: 'Upload' uses: actions/upload-artifact at v1 with: From webhook-mailer at python.org Mon May 25 20:32:26 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Tue, 26 May 2020 00:32:26 -0000 Subject: [Python-checkins] bpo-38964: Print correct filename on a SyntaxError in an fstring (GH-20399) Message-ID: https://github.com/python/cpython/commit/f7b1e461567e5e3fa3ba46f589d9edc1b45b2dd0 commit: f7b1e461567e5e3fa3ba46f589d9edc1b45b2dd0 branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-05-26T01:32:18+01:00 summary: bpo-38964: Print correct filename on a SyntaxError in an fstring (GH-20399) When a `SyntaxError` in the expression part of a fstring is found, the filename attribute of the `SyntaxError` is always ``. With this commit, it gets changed to always have the name of the file the fstring resides in. 
Co-authored-by: Pablo Galindo files: A Misc/NEWS.d/next/Core and Builtins/2020-05-25-21-49-11.bpo-38964.lrml90.rst M Lib/test/test_fstring.py M Parser/pegen/parse_string.c diff --git a/Lib/test/test_fstring.py b/Lib/test/test_fstring.py index e0bb5b56b2614..ea4e589929e7e 100644 --- a/Lib/test/test_fstring.py +++ b/Lib/test/test_fstring.py @@ -8,9 +8,12 @@ # Unicode identifiers in tests is allowed by PEP 3131. import ast +import os import types import decimal import unittest +from test.support import temp_cwd, use_old_parser +from test.support.script_helper import assert_python_failure a_global = 'global variable' @@ -1044,6 +1047,16 @@ def test_errors(self): r"f'{1000:j}'", ]) + @unittest.skipIf(use_old_parser(), "The old parser only supports as the filename") + def test_filename_in_syntaxerror(self): + # see issue 38964 + with temp_cwd() as cwd: + file_path = os.path.join(cwd, 't.py') + with open(file_path, 'w') as f: + f.write('f"{a b}"') # This generates a SyntaxError + _, _, stderr = assert_python_failure(file_path) + self.assertIn(file_path, stderr.decode('utf-8')) + def test_loop(self): for i in range(1000): self.assertEqual(f'i:{i}', 'i:' + str(i)) diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-25-21-49-11.bpo-38964.lrml90.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-25-21-49-11.bpo-38964.lrml90.rst new file mode 100644 index 0000000000000..1200764306946 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-25-21-49-11.bpo-38964.lrml90.rst @@ -0,0 +1 @@ +When there's a :exc:`SyntaxError` in the expression part of an fstring, the filename attribute of the :exc:`SyntaxError` gets correctly set to the name of the file the fstring resides in. 
\ No newline at end of file diff --git a/Parser/pegen/parse_string.c b/Parser/pegen/parse_string.c index ca4b733c153b5..a0ec698fa56a2 100644 --- a/Parser/pegen/parse_string.c +++ b/Parser/pegen/parse_string.c @@ -606,11 +606,8 @@ fstring_compile_expr(Parser *p, const char *expr_start, const char *expr_end, if (tok == NULL) { return NULL; } - tok->filename = PyUnicode_FromString(""); - if (!tok->filename) { - PyTokenizer_Free(tok); - return NULL; - } + Py_INCREF(p->tok->filename); + tok->filename = p->tok->filename; Parser *p2 = _PyPegen_Parser_New(tok, Py_fstring_input, p->flags, p->feature_version, NULL, p->arena); From webhook-mailer at python.org Tue May 26 00:08:45 2020 From: webhook-mailer at python.org (sth) Date: Tue, 26 May 2020 04:08:45 -0000 Subject: [Python-checkins] closes bpo-40774: Fix docs indentation for asyncio.create_subprocess_shell() (GH-20403) Message-ID: https://github.com/python/cpython/commit/4a0ac42c52a4d9ccfb0a78ab02aa03172ce0e31a commit: 4a0ac42c52a4d9ccfb0a78ab02aa03172ce0e31a branch: master author: sth committer: GitHub date: 2020-05-25T23:08:40-05:00 summary: closes bpo-40774: Fix docs indentation for asyncio.create_subprocess_shell() (GH-20403) files: M Doc/library/asyncio-subprocess.rst diff --git a/Doc/library/asyncio-subprocess.rst b/Doc/library/asyncio-subprocess.rst index 1d87d2f8005ec..eb1312a949a10 100644 --- a/Doc/library/asyncio-subprocess.rst +++ b/Doc/library/asyncio-subprocess.rst @@ -95,14 +95,14 @@ Creating Subprocesses See the documentation of :meth:`loop.subprocess_shell` for other parameters. -.. important:: - - It is the application's responsibility to ensure that all whitespace and - special characters are quoted appropriately to avoid `shell injection - `_ - vulnerabilities. The :func:`shlex.quote` function can be used to properly - escape whitespace and special shell characters in strings that are going - to be used to construct shell commands. + .. 
important:: + + It is the application's responsibility to ensure that all whitespace and + special characters are quoted appropriately to avoid `shell injection + `_ + vulnerabilities. The :func:`shlex.quote` function can be used to properly + escape whitespace and special shell characters in strings that are going + to be used to construct shell commands. .. deprecated-removed:: 3.8 3.10 From webhook-mailer at python.org Tue May 26 00:16:00 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 26 May 2020 04:16:00 -0000 Subject: [Python-checkins] closes bpo-40774: Fix docs indentation for asyncio.create_subprocess_shell() (GH-20403) Message-ID: https://github.com/python/cpython/commit/1f2cc7cedee1a768ee43151c115f6e338751eb8c commit: 1f2cc7cedee1a768ee43151c115f6e338751eb8c branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-25T21:15:52-07:00 summary: closes bpo-40774: Fix docs indentation for asyncio.create_subprocess_shell() (GH-20403) (cherry picked from commit 4a0ac42c52a4d9ccfb0a78ab02aa03172ce0e31a) Co-authored-by: sth files: M Doc/library/asyncio-subprocess.rst diff --git a/Doc/library/asyncio-subprocess.rst b/Doc/library/asyncio-subprocess.rst index 1d87d2f8005ec..eb1312a949a10 100644 --- a/Doc/library/asyncio-subprocess.rst +++ b/Doc/library/asyncio-subprocess.rst @@ -95,14 +95,14 @@ Creating Subprocesses See the documentation of :meth:`loop.subprocess_shell` for other parameters. -.. important:: - - It is the application's responsibility to ensure that all whitespace and - special characters are quoted appropriately to avoid `shell injection - `_ - vulnerabilities. The :func:`shlex.quote` function can be used to properly - escape whitespace and special shell characters in strings that are going - to be used to construct shell commands. + .. 
important:: + + It is the application's responsibility to ensure that all whitespace and + special characters are quoted appropriately to avoid `shell injection + `_ + vulnerabilities. The :func:`shlex.quote` function can be used to properly + escape whitespace and special shell characters in strings that are going + to be used to construct shell commands. .. deprecated-removed:: 3.8 3.10 From webhook-mailer at python.org Tue May 26 00:33:43 2020 From: webhook-mailer at python.org (Ammar Askar) Date: Tue, 26 May 2020 04:33:43 -0000 Subject: [Python-checkins] Fix peg_generator compiler warnings under MSVC (GH-20405) Message-ID: https://github.com/python/cpython/commit/a2bbedc8b18c001d2f9e702e6e678efbb2990daa commit: a2bbedc8b18c001d2f9e702e6e678efbb2990daa branch: master author: Ammar Askar committer: GitHub date: 2020-05-26T05:33:35+01:00 summary: Fix peg_generator compiler warnings under MSVC (GH-20405) files: M Parser/tokenizer.c M Tools/peg_generator/peg_extension/peg_extension.c diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c index b81fa118f216e..cebfadc8e89f3 100644 --- a/Parser/tokenizer.c +++ b/Parser/tokenizer.c @@ -32,10 +32,6 @@ || c == '_'\ || (c >= 128)) -extern char *PyOS_Readline(FILE *, FILE *, const char *); -/* Return malloc'ed string including trailing \n; - empty malloc'ed string for EOF; - NULL if interrupted */ /* Don't ever change this -- it would break the portability of Python code */ #define TABSIZE 8 diff --git a/Tools/peg_generator/peg_extension/peg_extension.c b/Tools/peg_generator/peg_extension/peg_extension.c index fb552eed3ba01..96d3a52b88088 100644 --- a/Tools/peg_generator/peg_extension/peg_extension.c +++ b/Tools/peg_generator/peg_extension/peg_extension.c @@ -96,21 +96,21 @@ parse_string(PyObject *self, PyObject *args, PyObject *kwds) } static PyObject * -clear_memo_stats() +clear_memo_stats(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(ignored)) { _PyPegen_clear_memo_statistics(); Py_RETURN_NONE; } static PyObject * 
-get_memo_stats() +get_memo_stats(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(ignored)) { return _PyPegen_get_memo_statistics(); } // TODO: Write to Python's sys.stdout instead of C's stdout. static PyObject * -dump_memo_stats() +dump_memo_stats(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(ignored)) { PyObject *list = _PyPegen_get_memo_statistics(); if (list == NULL) { @@ -124,7 +124,7 @@ dump_memo_stats() break; } if (count > 0) { - printf("%4ld %9ld\n", i, count); + printf("%4zd %9ld\n", i, count); } } Py_DECREF(list); From webhook-mailer at python.org Tue May 26 00:39:04 2020 From: webhook-mailer at python.org (Raymond Hettinger) Date: Tue, 26 May 2020 04:39:04 -0000 Subject: [Python-checkins] Simplify creation of the __new__ method in namedtuple() (GH-20361) Message-ID: https://github.com/python/cpython/commit/3cfe5b7b8fb3a0396e62800f3873d9b1f70da1c2 commit: 3cfe5b7b8fb3a0396e62800f3873d9b1f70da1c2 branch: master author: Raymond Hettinger committer: GitHub date: 2020-05-25T21:39:00-07:00 summary: Simplify creation of the __new__ method in namedtuple() (GH-20361) files: M Lib/collections/__init__.py diff --git a/Lib/collections/__init__.py b/Lib/collections/__init__.py index c4bff592dc0e7..011a0c1e7c19d 100644 --- a/Lib/collections/__init__.py +++ b/Lib/collections/__init__.py @@ -406,11 +406,9 @@ def namedtuple(typename, field_names, *, rename=False, defaults=None, module=Non # Create all the named tuple methods to be added to the class namespace - s = f'def __new__(_cls, {arg_list}): return _tuple_new(_cls, ({arg_list}))' + s = f'lambda _cls, {arg_list}: _tuple_new(_cls, ({arg_list}))' namespace = {'_tuple_new': tuple_new, '__name__': f'namedtuple_{typename}'} - # Note: exec() has the side-effect of interning the field names - exec(s, namespace) - __new__ = namespace['__new__'] + __new__ = eval(s, namespace) __new__.__doc__ = f'Create new instance of {typename}({arg_list})' if defaults is not None: __new__.__defaults__ = defaults From webhook-mailer at 
python.org Tue May 26 00:45:42 2020 From: webhook-mailer at python.org (ziheng) Date: Tue, 26 May 2020 04:45:42 -0000 Subject: [Python-checkins] bpo-40745: Fix typos in NewType docs (GH-20379) Message-ID: https://github.com/python/cpython/commit/2b0e654f91f28379c6c7ef5fd80e8754afb70935 commit: 2b0e654f91f28379c6c7ef5fd80e8754afb70935 branch: master author: ziheng committer: GitHub date: 2020-05-25T21:45:35-07:00 summary: bpo-40745: Fix typos in NewType docs (GH-20379) files: M Doc/library/typing.rst diff --git a/Doc/library/typing.rst b/Doc/library/typing.rst index fa13c07c44ea3..e85b6d697f79d 100644 --- a/Doc/library/typing.rst +++ b/Doc/library/typing.rst @@ -1021,9 +1021,9 @@ The module defines the following classes, functions and decorators: ``List[ForwardRef("SomeClass")]``. This class should not be instantiated by a user, but may be used by introspection tools. -.. function:: NewType(typ) +.. function:: NewType(name, tp) - A helper function to indicate a distinct types to a typechecker, + A helper function to indicate a distinct type to a typechecker, see :ref:`distinct`. At runtime it returns a function that returns its argument. 
Usage:: From webhook-mailer at python.org Tue May 26 00:52:59 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 26 May 2020 04:52:59 -0000 Subject: [Python-checkins] bpo-40745: Fix typos in NewType docs (GH-20379) Message-ID: https://github.com/python/cpython/commit/b38bd8888a6e90741a989db2fe321e8b98d5a5c4 commit: b38bd8888a6e90741a989db2fe321e8b98d5a5c4 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-25T21:52:55-07:00 summary: bpo-40745: Fix typos in NewType docs (GH-20379) (cherry picked from commit 2b0e654f91f28379c6c7ef5fd80e8754afb70935) Co-authored-by: ziheng files: M Doc/library/typing.rst diff --git a/Doc/library/typing.rst b/Doc/library/typing.rst index 7269e181c7349..5fa89423d20bf 100644 --- a/Doc/library/typing.rst +++ b/Doc/library/typing.rst @@ -1020,9 +1020,9 @@ The module defines the following classes, functions and decorators: ``List[ForwardRef("SomeClass")]``. This class should not be instantiated by a user, but may be used by introspection tools. -.. function:: NewType(typ) +.. function:: NewType(name, tp) - A helper function to indicate a distinct types to a typechecker, + A helper function to indicate a distinct type to a typechecker, see :ref:`distinct`. At runtime it returns a function that returns its argument. 
Usage:: From webhook-mailer at python.org Tue May 26 02:45:05 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 26 May 2020 06:45:05 -0000 Subject: [Python-checkins] bpo-39830: Add zipfile.Path to __all__ (GH-19115) (GH-19116) Message-ID: https://github.com/python/cpython/commit/5c1d745da5e1166a8724b619060165dcf3949e93 commit: 5c1d745da5e1166a8724b619060165dcf3949e93 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-26T02:44:57-04:00 summary: bpo-39830: Add zipfile.Path to __all__ (GH-19115) (GH-19116) (cherry picked from commit 9a81ab107a54b8ca320fb703f7c68e14ccd9d016) Co-authored-by: Zackery Spytz Co-authored-by: Zackery Spytz files: A Misc/NEWS.d/next/Library/2020-03-23-05-21-13.bpo-39830.IkqU1Y.rst M Lib/zipfile.py diff --git a/Lib/zipfile.py b/Lib/zipfile.py index 07faaccac9226..f7a2a2e8b8ab9 100644 --- a/Lib/zipfile.py +++ b/Lib/zipfile.py @@ -37,7 +37,8 @@ __all__ = ["BadZipFile", "BadZipfile", "error", "ZIP_STORED", "ZIP_DEFLATED", "ZIP_BZIP2", "ZIP_LZMA", - "is_zipfile", "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile"] + "is_zipfile", "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile", + "Path"] class BadZipFile(Exception): pass diff --git a/Misc/NEWS.d/next/Library/2020-03-23-05-21-13.bpo-39830.IkqU1Y.rst b/Misc/NEWS.d/next/Library/2020-03-23-05-21-13.bpo-39830.IkqU1Y.rst new file mode 100644 index 0000000000000..fc9c650cc39f2 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-03-23-05-21-13.bpo-39830.IkqU1Y.rst @@ -0,0 +1 @@ +Add :class:`zipfile.Path` to ``__all__`` in the :mod:`zipfile` module. 
From webhook-mailer at python.org Tue May 26 04:04:22 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Tue, 26 May 2020 08:04:22 -0000 Subject: [Python-checkins] Remove duplicated words words (GH-20413) Message-ID: https://github.com/python/cpython/commit/1c5d1d7304a119040fd3118128bdb412f0cce6a6 commit: 1c5d1d7304a119040fd3118128bdb412f0cce6a6 branch: master author: Serhiy Storchaka committer: GitHub date: 2020-05-26T01:04:14-07:00 summary: Remove duplicated words words (GH-20413) files: M Doc/library/ssl.rst M Doc/whatsnew/3.8.rst M Doc/whatsnew/3.9.rst M Lib/test/support/interpreters.rst M Misc/NEWS.d/3.8.0a1.rst M Misc/NEWS.d/3.8.0b1.rst diff --git a/Doc/library/ssl.rst b/Doc/library/ssl.rst index 952ee1653b924..852091c02ec9a 100644 --- a/Doc/library/ssl.rst +++ b/Doc/library/ssl.rst @@ -615,7 +615,7 @@ Constants Possible value for :attr:`SSLContext.verify_flags`. In this mode, only the peer cert is checked but none of the intermediate CA certificates. The mode requires a valid CRL that is signed by the peer cert's issuer (its direct - ancestor CA). If no proper CRL has has been loaded with + ancestor CA). If no proper CRL has been loaded with :attr:`SSLContext.load_verify_locations`, validation will fail. .. versionadded:: 3.4 diff --git a/Doc/whatsnew/3.8.rst b/Doc/whatsnew/3.8.rst index fdfc0a8f472cd..b6ed2da36889a 100644 --- a/Doc/whatsnew/3.8.rst +++ b/Doc/whatsnew/3.8.rst @@ -1692,7 +1692,7 @@ Deprecated :meth:`~gettext.NullTranslations.set_output_charset`, and the *codeset* parameter of functions :func:`~gettext.translation` and :func:`~gettext.install` are also deprecated, since they are only used for - for the ``l*gettext()`` functions. + the ``l*gettext()`` functions. (Contributed by Serhiy Storchaka in :issue:`33710`.) 
* The :meth:`~threading.Thread.isAlive()` method of :class:`threading.Thread` diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index ebb24ebb026fc..d72fea2c67968 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -868,7 +868,7 @@ Removed (Contributed by Victor Stinner in :issue:`39489`.) * The ``_field_types`` attribute of the :class:`typing.NamedTuple` class - has been removed. It was deprecated deprecated since Python 3.8. Use + has been removed. It was deprecated since Python 3.8. Use the ``__annotations__`` attribute instead. (Contributed by Serhiy Storchaka in :issue:`40182`.) diff --git a/Lib/test/support/interpreters.rst b/Lib/test/support/interpreters.rst index 9a05eb67520c8..37a60b1072a66 100644 --- a/Lib/test/support/interpreters.rst +++ b/Lib/test/support/interpreters.rst @@ -127,7 +127,7 @@ This module also defines the following exceptions. .. exception:: ChannelNotFoundError This exception is a subclass of :exc:`ChannelError`, and is raised - when the the identified channel is not found. + when the identified channel is not found. .. exception:: ChannelEmptyError diff --git a/Misc/NEWS.d/3.8.0a1.rst b/Misc/NEWS.d/3.8.0a1.rst index c5ab5f4489c92..b7164a5b4e0fa 100644 --- a/Misc/NEWS.d/3.8.0a1.rst +++ b/Misc/NEWS.d/3.8.0a1.rst @@ -7034,7 +7034,7 @@ Fix ftplib test for TLS 1.3 by reading from data socket. .. nonce: g7TwYm .. section: Tests -Fix `test_socket` on AIX AIX 6.1 and later IPv6 zone id supports only +Fix `test_socket` on AIX 6.1 and later IPv6 zone id supports only supported by inet_pton6_zone() Switch to runtime-based platform.system() to establish current platform rather than build-time based sys.platform() diff --git a/Misc/NEWS.d/3.8.0b1.rst b/Misc/NEWS.d/3.8.0b1.rst index 43a88a37c5cb0..5400c07795292 100644 --- a/Misc/NEWS.d/3.8.0b1.rst +++ b/Misc/NEWS.d/3.8.0b1.rst @@ -1612,7 +1612,7 @@ versions. .. nonce: Zot4sx .. 
section: Documentation -Improve documentation of the stdin, stdout, and stderr arguments of of the +Improve documentation of the stdin, stdout, and stderr arguments of the ``asyncio.subprocess_exec`` function to specify which values are supported. Also mention that decoding as text is not supported. From webhook-mailer at python.org Tue May 26 04:16:42 2020 From: webhook-mailer at python.org (Zackery Spytz) Date: Tue, 26 May 2020 08:16:42 -0000 Subject: [Python-checkins] bpo-39301: State that floor division is used for right shift operations (GH-20347) Message-ID: https://github.com/python/cpython/commit/af7553ac95a96713be847dd45bc5a8aeb0a75955 commit: af7553ac95a96713be847dd45bc5a8aeb0a75955 branch: master author: Zackery Spytz committer: GitHub date: 2020-05-26T09:16:34+01:00 summary: bpo-39301: State that floor division is used for right shift operations (GH-20347) * bpo-39301: State that floor division is used for right shift operations * Remove "without overflow check" files: M Doc/library/stdtypes.rst diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst index 4e7729c83f49a..6a9fdcb38d24b 100644 --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -434,12 +434,10 @@ Notes: Negative shift counts are illegal and cause a :exc:`ValueError` to be raised. (2) - A left shift by *n* bits is equivalent to multiplication by ``pow(2, n)`` - without overflow check. + A left shift by *n* bits is equivalent to multiplication by ``pow(2, n)``. (3) - A right shift by *n* bits is equivalent to division by ``pow(2, n)`` without - overflow check. + A right shift by *n* bits is equivalent to floor division by ``pow(2, n)``. 
(4) Performing these calculations with at least one extra sign extension bit in From webhook-mailer at python.org Tue May 26 04:33:47 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 26 May 2020 08:33:47 -0000 Subject: [Python-checkins] bpo-39301: State that floor division is used for right shift operations (GH-20347) (GH-20415) Message-ID: https://github.com/python/cpython/commit/c2a177adf3575d4eb81030fba851f78d7a8e3f51 commit: c2a177adf3575d4eb81030fba851f78d7a8e3f51 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-26T09:33:42+01:00 summary: bpo-39301: State that floor division is used for right shift operations (GH-20347) (GH-20415) * bpo-39301: State that floor division is used for right shift operations * Remove "without overflow check" (cherry picked from commit af7553ac95a96713be847dd45bc5a8aeb0a75955) Co-authored-by: Zackery Spytz files: M Doc/library/stdtypes.rst diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst index 8cc57c30c322c..ccd2f99ccb34f 100644 --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -434,12 +434,10 @@ Notes: Negative shift counts are illegal and cause a :exc:`ValueError` to be raised. (2) - A left shift by *n* bits is equivalent to multiplication by ``pow(2, n)`` - without overflow check. + A left shift by *n* bits is equivalent to multiplication by ``pow(2, n)``. (3) - A right shift by *n* bits is equivalent to division by ``pow(2, n)`` without - overflow check. + A right shift by *n* bits is equivalent to floor division by ``pow(2, n)``. 
(4) Performing these calculations with at least one extra sign extension bit in From webhook-mailer at python.org Tue May 26 04:34:08 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 26 May 2020 08:34:08 -0000 Subject: [Python-checkins] bpo-39301: State that floor division is used for right shift operations (GH-20347) (GH-20416) Message-ID: https://github.com/python/cpython/commit/b068d892c1ba7b996e43aceb974bfadac3c577ed commit: b068d892c1ba7b996e43aceb974bfadac3c577ed branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-26T09:34:04+01:00 summary: bpo-39301: State that floor division is used for right shift operations (GH-20347) (GH-20416) * bpo-39301: State that floor division is used for right shift operations * Remove "without overflow check" (cherry picked from commit af7553ac95a96713be847dd45bc5a8aeb0a75955) Co-authored-by: Zackery Spytz Co-authored-by: Zackery Spytz files: M Doc/library/stdtypes.rst diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst index c4c4ccd76d5a7..c35cb2e11d32d 100644 --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -436,12 +436,10 @@ Notes: Negative shift counts are illegal and cause a :exc:`ValueError` to be raised. (2) - A left shift by *n* bits is equivalent to multiplication by ``pow(2, n)`` - without overflow check. + A left shift by *n* bits is equivalent to multiplication by ``pow(2, n)``. (3) - A right shift by *n* bits is equivalent to division by ``pow(2, n)`` without - overflow check. + A right shift by *n* bits is equivalent to floor division by ``pow(2, n)``. 
(4) Performing these calculations with at least one extra sign extension bit in From webhook-mailer at python.org Tue May 26 04:57:19 2020 From: webhook-mailer at python.org (Zackery Spytz) Date: Tue, 26 May 2020 08:57:19 -0000 Subject: [Python-checkins] [3.8] bpo-35714: Reject null characters in struct format strings (GH-16928) (GH-20419) Message-ID: https://github.com/python/cpython/commit/5ff5edfef63b3dbc1abb004b3fa4b3db87e79ff9 commit: 5ff5edfef63b3dbc1abb004b3fa4b3db87e79ff9 branch: 3.8 author: Zackery Spytz committer: GitHub date: 2020-05-26T11:57:09+03:00 summary: [3.8] bpo-35714: Reject null characters in struct format strings (GH-16928) (GH-20419) struct.error is now raised if there is a null character in a struct format string. (cherry picked from commit 3f59b55316f4c6ab451997902579aa69020b537c) files: A Misc/NEWS.d/next/Library/2019-10-25-23-45-49.bpo-35714.fw3xb7.rst M Lib/test/test_struct.py M Modules/_struct.c diff --git a/Lib/test/test_struct.py b/Lib/test/test_struct.py index 454082e66d3f8..67e7c559d9f1d 100644 --- a/Lib/test/test_struct.py +++ b/Lib/test/test_struct.py @@ -652,6 +652,13 @@ def test_format_attr(self): s2 = struct.Struct(s.format.encode()) self.assertEqual(s2.format, s.format) + def test_issue35714(self): + # Embedded null characters should not be allowed in format strings. + for s in '\0', '2\0i', b'\0': + with self.assertRaisesRegex(struct.error, + 'embedded null character'): + struct.calcsize(s) + class UnpackIteratorTest(unittest.TestCase): """ diff --git a/Misc/NEWS.d/next/Library/2019-10-25-23-45-49.bpo-35714.fw3xb7.rst b/Misc/NEWS.d/next/Library/2019-10-25-23-45-49.bpo-35714.fw3xb7.rst new file mode 100644 index 0000000000000..39102065ca7b5 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-10-25-23-45-49.bpo-35714.fw3xb7.rst @@ -0,0 +1,2 @@ +:exc:`struct.error` is now raised if there is a null character in a +:mod:`struct` format string. 
diff --git a/Modules/_struct.c b/Modules/_struct.c index 1c917b7513f46..64a9827e83aae 100644 --- a/Modules/_struct.c +++ b/Modules/_struct.c @@ -1285,6 +1285,10 @@ prepare_s(PyStructObject *self) size_t ncodes; fmt = PyBytes_AS_STRING(self->s_format); + if (strlen(fmt) != (size_t)PyBytes_GET_SIZE(self->s_format)) { + PyErr_SetString(StructError, "embedded null character"); + return -1; + } f = whichtable(&fmt); From webhook-mailer at python.org Tue May 26 05:16:43 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 26 May 2020 09:16:43 -0000 Subject: [Python-checkins] [3.8] bpo-35714: Reject null characters in struct format strings (GH-16928) (GH-20419) Message-ID: https://github.com/python/cpython/commit/4ea802868460fad54e40cb99eb0ca283b3b293f0 commit: 4ea802868460fad54e40cb99eb0ca283b3b293f0 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-26T02:16:36-07:00 summary: [3.8] bpo-35714: Reject null characters in struct format strings (GH-16928) (GH-20419) struct.error is now raised if there is a null character in a struct format string. (cherry picked from commit 3f59b55316f4c6ab451997902579aa69020b537c) (cherry picked from commit 5ff5edfef63b3dbc1abb004b3fa4b3db87e79ff9) Co-authored-by: Zackery Spytz files: A Misc/NEWS.d/next/Library/2019-10-25-23-45-49.bpo-35714.fw3xb7.rst M Lib/test/test_struct.py M Modules/_struct.c diff --git a/Lib/test/test_struct.py b/Lib/test/test_struct.py index 8fd56c91cb7a4..104f4d30c9e1d 100644 --- a/Lib/test/test_struct.py +++ b/Lib/test/test_struct.py @@ -626,6 +626,13 @@ def test_format_attr(self): s2 = struct.Struct(s.format.encode()) self.assertEqual(s2.format, s.format) + def test_issue35714(self): + # Embedded null characters should not be allowed in format strings. 
+ for s in '\0', '2\0i', b'\0': + with self.assertRaisesRegex(struct.error, + 'embedded null character'): + struct.calcsize(s) + class UnpackIteratorTest(unittest.TestCase): """ diff --git a/Misc/NEWS.d/next/Library/2019-10-25-23-45-49.bpo-35714.fw3xb7.rst b/Misc/NEWS.d/next/Library/2019-10-25-23-45-49.bpo-35714.fw3xb7.rst new file mode 100644 index 0000000000000..39102065ca7b5 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-10-25-23-45-49.bpo-35714.fw3xb7.rst @@ -0,0 +1,2 @@ +:exc:`struct.error` is now raised if there is a null character in a +:mod:`struct` format string. diff --git a/Modules/_struct.c b/Modules/_struct.c index c09951dcb79f3..4bde0ce9f0658 100644 --- a/Modules/_struct.c +++ b/Modules/_struct.c @@ -1285,6 +1285,10 @@ prepare_s(PyStructObject *self) size_t ncodes; fmt = PyBytes_AS_STRING(self->s_format); + if (strlen(fmt) != (size_t)PyBytes_GET_SIZE(self->s_format)) { + PyErr_SetString(StructError, "embedded null character"); + return -1; + } f = whichtable(&fmt); From webhook-mailer at python.org Tue May 26 06:26:33 2020 From: webhook-mailer at python.org (Christian Heimes) Date: Tue, 26 May 2020 10:26:33 -0000 Subject: [Python-checkins] bpo-40637: Fix test_pbkdf2_hmac_py for missing sha1 (#20422) Message-ID: https://github.com/python/cpython/commit/be63019ed726b2da045bf232782062830bb6c27d commit: be63019ed726b2da045bf232782062830bb6c27d branch: master author: Christian Heimes committer: GitHub date: 2020-05-26T12:26:29+02:00 summary: bpo-40637: Fix test_pbkdf2_hmac_py for missing sha1 (#20422) files: M Lib/test/test_hashlib.py diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py index d40acd5889913..6088307f8410b 100644 --- a/Lib/test/test_hashlib.py +++ b/Lib/test/test_hashlib.py @@ -1004,17 +1004,31 @@ def _test_pbkdf2_hmac(self, pbkdf2, supported): self.assertEqual(out, expected, (digest_name, password, salt, rounds)) - self.assertRaises(TypeError, pbkdf2, b'sha1', b'pass', b'salt', 1) - self.assertRaises(TypeError, pbkdf2, 
'sha1', 'pass', 'salt', 1) - self.assertRaises(ValueError, pbkdf2, 'sha1', b'pass', b'salt', 0) - self.assertRaises(ValueError, pbkdf2, 'sha1', b'pass', b'salt', -1) - self.assertRaises(ValueError, pbkdf2, 'sha1', b'pass', b'salt', 1, 0) - self.assertRaises(ValueError, pbkdf2, 'sha1', b'pass', b'salt', 1, -1) with self.assertRaisesRegex(ValueError, 'unsupported hash type'): pbkdf2('unknown', b'pass', b'salt', 1) - out = pbkdf2(hash_name='sha1', password=b'password', salt=b'salt', - iterations=1, dklen=None) - self.assertEqual(out, self.pbkdf2_results['sha1'][0][0]) + + if 'sha1' in supported: + self.assertRaises( + TypeError, pbkdf2, b'sha1', b'pass', b'salt', 1 + ) + self.assertRaises( + TypeError, pbkdf2, 'sha1', 'pass', 'salt', 1 + ) + self.assertRaises( + ValueError, pbkdf2, 'sha1', b'pass', b'salt', 0 + ) + self.assertRaises( + ValueError, pbkdf2, 'sha1', b'pass', b'salt', -1 + ) + self.assertRaises( + ValueError, pbkdf2, 'sha1', b'pass', b'salt', 1, 0 + ) + self.assertRaises( + ValueError, pbkdf2, 'sha1', b'pass', b'salt', 1, -1 + ) + out = pbkdf2(hash_name='sha1', password=b'password', salt=b'salt', + iterations=1, dklen=None) + self.assertEqual(out, self.pbkdf2_results['sha1'][0][0]) def test_pbkdf2_hmac_py(self): self._test_pbkdf2_hmac(py_hashlib.pbkdf2_hmac, builtin_hashes) From webhook-mailer at python.org Tue May 26 08:18:23 2020 From: webhook-mailer at python.org (Erlend Egeberg Aasland) Date: Tue, 26 May 2020 12:18:23 -0000 Subject: [Python-checkins] bpo-40737: Fix possible reference leak for sqlite3 initialization (GH-20323) Message-ID: https://github.com/python/cpython/commit/5eb45d7d4e812e89d77da84cc619e9db81561a34 commit: 5eb45d7d4e812e89d77da84cc619e9db81561a34 branch: master author: Erlend Egeberg Aasland committer: GitHub date: 2020-05-26T21:18:19+09:00 summary: bpo-40737: Fix possible reference leak for sqlite3 initialization (GH-20323) files: A Misc/NEWS.d/next/Library/2020-05-23-00-22-11.bpo-40737.iph-CM.rst M Modules/_sqlite/module.c diff 
--git a/Misc/NEWS.d/next/Library/2020-05-23-00-22-11.bpo-40737.iph-CM.rst b/Misc/NEWS.d/next/Library/2020-05-23-00-22-11.bpo-40737.iph-CM.rst new file mode 100644 index 0000000000000..f068d3a091a03 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-23-00-22-11.bpo-40737.iph-CM.rst @@ -0,0 +1 @@ +Fix possible reference leak for :mod:`sqlite3` initialization. diff --git a/Modules/_sqlite/module.c b/Modules/_sqlite/module.c index 4d9d3d41c7b71..71d951ee887e4 100644 --- a/Modules/_sqlite/module.c +++ b/Modules/_sqlite/module.c @@ -346,6 +346,14 @@ static struct PyModuleDef _sqlite3module = { NULL }; +#define ADD_TYPE(module, type) \ +do { \ + if (PyModule_AddType(module, &type) < 0) { \ + Py_DECREF(module); \ + return NULL; \ + } \ +} while (0) + PyMODINIT_FUNC PyInit__sqlite3(void) { PyObject *module, *dict; @@ -366,14 +374,10 @@ PyMODINIT_FUNC PyInit__sqlite3(void) return NULL; } - Py_INCREF(&pysqlite_ConnectionType); - PyModule_AddObject(module, "Connection", (PyObject*) &pysqlite_ConnectionType); - Py_INCREF(&pysqlite_CursorType); - PyModule_AddObject(module, "Cursor", (PyObject*) &pysqlite_CursorType); - Py_INCREF(&pysqlite_PrepareProtocolType); - PyModule_AddObject(module, "PrepareProtocol", (PyObject*) &pysqlite_PrepareProtocolType); - Py_INCREF(&pysqlite_RowType); - PyModule_AddObject(module, "Row", (PyObject*) &pysqlite_RowType); + ADD_TYPE(module, pysqlite_ConnectionType); + ADD_TYPE(module, pysqlite_CursorType); + ADD_TYPE(module, pysqlite_PrepareProtocolType); + ADD_TYPE(module, pysqlite_RowType); if (!(dict = PyModule_GetDict(module))) { goto error; From webhook-mailer at python.org Tue May 26 08:59:31 2020 From: webhook-mailer at python.org (Tal Einat) Date: Tue, 26 May 2020 12:59:31 -0000 Subject: [Python-checkins] [3.7] bpo-38580: Document that select() accepts iterables, not just sequences (GH-16832) Message-ID: https://github.com/python/cpython/commit/3f215f35bdb9d666f5a692fc60f800da1bb1e4a9 commit: 3f215f35bdb9d666f5a692fc60f800da1bb1e4a9 branch: 
3.7 author: Tal Einat committer: GitHub date: 2020-05-26T15:59:23+03:00 summary: [3.7] bpo-38580: Document that select() accepts iterables, not just sequences (GH-16832) (cherry picked from commit 372ee27d4958302dac7ad6a8711f6fd04771b2e6) Co-authored-by: Jakub Stasiak files: M Doc/library/select.rst M Modules/selectmodule.c diff --git a/Doc/library/select.rst b/Doc/library/select.rst index 7d65363e49313..0bc8800d25f11 100644 --- a/Doc/library/select.rst +++ b/Doc/library/select.rst @@ -117,7 +117,7 @@ The module defines the following: .. function:: select(rlist, wlist, xlist[, timeout]) This is a straightforward interface to the Unix :c:func:`select` system call. - The first three arguments are sequences of 'waitable objects': either + The first three arguments are iterables of 'waitable objects': either integers representing file descriptors or objects with a parameterless method named :meth:`~io.IOBase.fileno` returning such an integer: @@ -126,7 +126,7 @@ The module defines the following: * *xlist*: wait for an "exceptional condition" (see the manual page for what your system considers such a condition) - Empty sequences are allowed, but acceptance of three empty sequences is + Empty iterables are allowed, but acceptance of three empty iterables is platform-dependent. (It is known to work on Unix but not on Windows.) The optional *timeout* argument specifies a time-out as a floating point number in seconds. When the *timeout* argument is omitted the function blocks until @@ -141,7 +141,7 @@ The module defines the following: single: socket() (in module socket) single: popen() (in module os) - Among the acceptable object types in the sequences are Python :term:`file + Among the acceptable object types in the iterables are Python :term:`file objects ` (e.g. ``sys.stdin``, or objects returned by :func:`open` or :func:`os.popen`), socket objects returned by :func:`socket.socket`. 
You may also define a :dfn:`wrapper` class yourself, diff --git a/Modules/selectmodule.c b/Modules/selectmodule.c index 93d896a37c223..d89dfc0e716b9 100644 --- a/Modules/selectmodule.c +++ b/Modules/selectmodule.c @@ -243,7 +243,7 @@ select_select(PyObject *self, PyObject *args) } #endif /* SELECT_USES_HEAP */ - /* Convert sequences to fd_sets, and get maximum fd number + /* Convert iterables to fd_sets, and get maximum fd number * propagates the Python exception set in seq2set() */ rfd2obj[0].sentinel = -1; @@ -2360,7 +2360,7 @@ PyDoc_STRVAR(select_doc, "select(rlist, wlist, xlist[, timeout]) -> (rlist, wlist, xlist)\n\ \n\ Wait until one or more file descriptors are ready for some kind of I/O.\n\ -The first three arguments are sequences of file descriptors to be waited for:\n\ +The first three arguments are iterables of file descriptors to be waited for:\n\ rlist -- wait until ready for reading\n\ wlist -- wait until ready for writing\n\ xlist -- wait for an ``exceptional condition''\n\ From webhook-mailer at python.org Tue May 26 08:59:36 2020 From: webhook-mailer at python.org (Tal Einat) Date: Tue, 26 May 2020 12:59:36 -0000 Subject: [Python-checkins] [3.8] bpo-38580: Document that select() accepts iterables, not just sequences (GH-16832) Message-ID: https://github.com/python/cpython/commit/e3e800f3d28881cc9de38cd9bcbcf8fbdea238a6 commit: e3e800f3d28881cc9de38cd9bcbcf8fbdea238a6 branch: 3.8 author: Tal Einat committer: GitHub date: 2020-05-26T15:59:32+03:00 summary: [3.8] bpo-38580: Document that select() accepts iterables, not just sequences (GH-16832) (cherry picked from commit 372ee27d4958302dac7ad6a8711f6fd04771b2e6) Co-authored-by: Jakub Stasiak files: M Doc/library/select.rst M Modules/clinic/selectmodule.c.h M Modules/selectmodule.c diff --git a/Doc/library/select.rst b/Doc/library/select.rst index 8f5a2cea9257c..39622aadf3fa5 100644 --- a/Doc/library/select.rst +++ b/Doc/library/select.rst @@ -117,7 +117,7 @@ The module defines the following: .. 
function:: select(rlist, wlist, xlist[, timeout]) This is a straightforward interface to the Unix :c:func:`select` system call. - The first three arguments are sequences of 'waitable objects': either + The first three arguments are iterables of 'waitable objects': either integers representing file descriptors or objects with a parameterless method named :meth:`~io.IOBase.fileno` returning such an integer: @@ -126,7 +126,7 @@ The module defines the following: * *xlist*: wait for an "exceptional condition" (see the manual page for what your system considers such a condition) - Empty sequences are allowed, but acceptance of three empty sequences is + Empty iterables are allowed, but acceptance of three empty iterables is platform-dependent. (It is known to work on Unix but not on Windows.) The optional *timeout* argument specifies a time-out as a floating point number in seconds. When the *timeout* argument is omitted the function blocks until @@ -141,7 +141,7 @@ The module defines the following: single: socket() (in module socket) single: popen() (in module os) - Among the acceptable object types in the sequences are Python :term:`file + Among the acceptable object types in the iterables are Python :term:`file objects ` (e.g. ``sys.stdin``, or objects returned by :func:`open` or :func:`os.popen`), socket objects returned by :func:`socket.socket`. 
You may also define a :dfn:`wrapper` class yourself, diff --git a/Modules/clinic/selectmodule.c.h b/Modules/clinic/selectmodule.c.h index 9015816f80c14..51855d96c37d7 100644 --- a/Modules/clinic/selectmodule.c.h +++ b/Modules/clinic/selectmodule.c.h @@ -8,7 +8,7 @@ PyDoc_STRVAR(select_select__doc__, "\n" "Wait until one or more file descriptors are ready for some kind of I/O.\n" "\n" -"The first three arguments are sequences of file descriptors to be waited for:\n" +"The first three arguments are iterables of file descriptors to be waited for:\n" "rlist -- wait until ready for reading\n" "wlist -- wait until ready for writing\n" "xlist -- wait for an \"exceptional condition\"\n" @@ -1215,4 +1215,4 @@ select_kqueue_control(kqueue_queue_Object *self, PyObject *const *args, Py_ssize #ifndef SELECT_KQUEUE_CONTROL_METHODDEF #define SELECT_KQUEUE_CONTROL_METHODDEF #endif /* !defined(SELECT_KQUEUE_CONTROL_METHODDEF) */ -/*[clinic end generated code: output=03041f3d09b04a3d input=a9049054013a1b77]*/ +/*[clinic end generated code: output=9b4b1e1cae1f3afb input=a9049054013a1b77]*/ diff --git a/Modules/selectmodule.c b/Modules/selectmodule.c index ed71d8b0d5985..31b64c4c8a45a 100644 --- a/Modules/selectmodule.c +++ b/Modules/selectmodule.c @@ -218,7 +218,7 @@ select.select Wait until one or more file descriptors are ready for some kind of I/O. -The first three arguments are sequences of file descriptors to be waited for: +The first three arguments are iterables of file descriptors to be waited for: rlist -- wait until ready for reading wlist -- wait until ready for writing xlist -- wait for an "exceptional condition" @@ -243,7 +243,7 @@ descriptors can be used. 
static PyObject * select_select_impl(PyObject *module, PyObject *rlist, PyObject *wlist, PyObject *xlist, PyObject *timeout_obj) -/*[clinic end generated code: output=2b3cfa824f7ae4cf input=177e72184352df25]*/ +/*[clinic end generated code: output=2b3cfa824f7ae4cf input=e467f5d68033de00]*/ { #ifdef SELECT_USES_HEAP pylist *rfd2obj, *wfd2obj, *efd2obj; @@ -299,7 +299,7 @@ select_select_impl(PyObject *module, PyObject *rlist, PyObject *wlist, } #endif /* SELECT_USES_HEAP */ - /* Convert sequences to fd_sets, and get maximum fd number + /* Convert iterables to fd_sets, and get maximum fd number * propagates the Python exception set in seq2set() */ rfd2obj[0].sentinel = -1; From webhook-mailer at python.org Tue May 26 10:54:29 2020 From: webhook-mailer at python.org (idomic) Date: Tue, 26 May 2020 14:54:29 -0000 Subject: [Python-checkins] bpo-39244: multiprocessing return default start method first on macOS (GH-18625) Message-ID: https://github.com/python/cpython/commit/db098bc1f05bd0773943e59f83489f05f28dedf8 commit: db098bc1f05bd0773943e59f83489f05f28dedf8 branch: master author: idomic committer: GitHub date: 2020-05-26T17:54:21+03:00 summary: bpo-39244: multiprocessing return default start method first on macOS (GH-18625) files: A Misc/NEWS.d/next/Library/2020-02-23-15-09-47.bpo-39244.aBK5IM.rst M Lib/multiprocessing/context.py M Lib/test/_test_multiprocessing.py diff --git a/Lib/multiprocessing/context.py b/Lib/multiprocessing/context.py index 5f8e0f0cd4658..8d0525d5d6217 100644 --- a/Lib/multiprocessing/context.py +++ b/Lib/multiprocessing/context.py @@ -257,10 +257,11 @@ def get_all_start_methods(self): if sys.platform == 'win32': return ['spawn'] else: + methods = ['spawn', 'fork'] if sys.platform == 'darwin' else ['fork', 'spawn'] if reduction.HAVE_SEND_HANDLE: - return ['fork', 'spawn', 'forkserver'] - else: - return ['fork', 'spawn'] + methods.append('forkserver') + return methods + # # Context types for fixed start method diff --git 
a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py index dc8164f3288e1..155a8276e7507 100644 --- a/Lib/test/_test_multiprocessing.py +++ b/Lib/test/_test_multiprocessing.py @@ -5039,7 +5039,9 @@ def test_get_all(self): self.assertEqual(methods, ['spawn']) else: self.assertTrue(methods == ['fork', 'spawn'] or - methods == ['fork', 'spawn', 'forkserver']) + methods == ['spawn', 'fork'] or + methods == ['fork', 'spawn', 'forkserver'] or + methods == ['spawn', 'fork', 'forkserver']) def test_preload_resources(self): if multiprocessing.get_start_method() != 'forkserver': diff --git a/Misc/NEWS.d/next/Library/2020-02-23-15-09-47.bpo-39244.aBK5IM.rst b/Misc/NEWS.d/next/Library/2020-02-23-15-09-47.bpo-39244.aBK5IM.rst new file mode 100644 index 0000000000000..c7d8e0de676b5 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-02-23-15-09-47.bpo-39244.aBK5IM.rst @@ -0,0 +1,2 @@ +Fixed :func:`multiprocessing.context.get_all_start_methods` +to properly return the default method first on macOS. From webhook-mailer at python.org Tue May 26 10:55:26 2020 From: webhook-mailer at python.org (Arturo Escaip) Date: Tue, 26 May 2020 14:55:26 -0000 Subject: [Python-checkins] bpo-40756: Default second argument of LoggerAdapter.__init__ to None (GH-20362) Message-ID: https://github.com/python/cpython/commit/8ad052464a4e0aef9a11663b80f187087b773592 commit: 8ad052464a4e0aef9a11663b80f187087b773592 branch: master author: Arturo Escaip committer: GitHub date: 2020-05-26T07:55:21-07:00 summary: bpo-40756: Default second argument of LoggerAdapter.__init__ to None (GH-20362) The 'extra' argument is not always used by custom logger adapters. 
For example: ```python class IndentAdapter(logging.LoggerAdapter): def process(self, msg, kwargs): indent = kwargs.pop('indent', 1) return ' ' * indent + msg, kwargs ``` It is cleaner and friendlier to default the 'extra' argument to None instead of either forcing the subclasses of LoggerAdapter to pass a None value directly or to override the constructor. This change is backward compatible because existing calls to `LoggerAdapter.__init__` are already passing a value for the second argument. Automerge-Triggered-By: @vsajip files: A Misc/NEWS.d/next/Library/2020-05-24-11-06-37.bpo-40756.7ZH83z.rst M Lib/logging/__init__.py diff --git a/Lib/logging/__init__.py b/Lib/logging/__init__.py index 403dc81b13ef4..6d27301a7056e 100644 --- a/Lib/logging/__init__.py +++ b/Lib/logging/__init__.py @@ -1751,7 +1751,7 @@ class LoggerAdapter(object): information in logging output. """ - def __init__(self, logger, extra): + def __init__(self, logger, extra=None): """ Initialize the adapter with a logger and a dict-like object which provides contextual information. This constructor signature allows diff --git a/Misc/NEWS.d/next/Library/2020-05-24-11-06-37.bpo-40756.7ZH83z.rst b/Misc/NEWS.d/next/Library/2020-05-24-11-06-37.bpo-40756.7ZH83z.rst new file mode 100644 index 0000000000000..a970f5be156f5 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-24-11-06-37.bpo-40756.7ZH83z.rst @@ -0,0 +1,2 @@ +The second argument (extra) of ``LoggerAdapter.__init__`` now defaults to +None. 
From webhook-mailer at python.org Tue May 26 11:14:05 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 26 May 2020 15:14:05 -0000 Subject: [Python-checkins] bpo-39244: multiprocessing return default start method first on macOS (GH-18625) Message-ID: https://github.com/python/cpython/commit/285ff63351bb5a42099527c283f65434e761be83 commit: 285ff63351bb5a42099527c283f65434e761be83 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-26T08:13:59-07:00 summary: bpo-39244: multiprocessing return default start method first on macOS (GH-18625) (cherry picked from commit db098bc1f05bd0773943e59f83489f05f28dedf8) Co-authored-by: idomic files: A Misc/NEWS.d/next/Library/2020-02-23-15-09-47.bpo-39244.aBK5IM.rst M Lib/multiprocessing/context.py M Lib/test/_test_multiprocessing.py diff --git a/Lib/multiprocessing/context.py b/Lib/multiprocessing/context.py index 5f8e0f0cd4658..8d0525d5d6217 100644 --- a/Lib/multiprocessing/context.py +++ b/Lib/multiprocessing/context.py @@ -257,10 +257,11 @@ def get_all_start_methods(self): if sys.platform == 'win32': return ['spawn'] else: + methods = ['spawn', 'fork'] if sys.platform == 'darwin' else ['fork', 'spawn'] if reduction.HAVE_SEND_HANDLE: - return ['fork', 'spawn', 'forkserver'] - else: - return ['fork', 'spawn'] + methods.append('forkserver') + return methods + # # Context types for fixed start method diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py index ff58481f00314..d5cccac16f451 100644 --- a/Lib/test/_test_multiprocessing.py +++ b/Lib/test/_test_multiprocessing.py @@ -5007,7 +5007,9 @@ def test_get_all(self): self.assertEqual(methods, ['spawn']) else: self.assertTrue(methods == ['fork', 'spawn'] or - methods == ['fork', 'spawn', 'forkserver']) + methods == ['spawn', 'fork'] or + methods == ['fork', 'spawn', 'forkserver'] or + methods == ['spawn', 'fork', 'forkserver']) def 
test_preload_resources(self): if multiprocessing.get_start_method() != 'forkserver': diff --git a/Misc/NEWS.d/next/Library/2020-02-23-15-09-47.bpo-39244.aBK5IM.rst b/Misc/NEWS.d/next/Library/2020-02-23-15-09-47.bpo-39244.aBK5IM.rst new file mode 100644 index 0000000000000..c7d8e0de676b5 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-02-23-15-09-47.bpo-39244.aBK5IM.rst @@ -0,0 +1,2 @@ +Fixed :func:`multiprocessing.context.get_all_start_methods` +to properly return the default method first on macOS. From webhook-mailer at python.org Tue May 26 11:43:50 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Tue, 26 May 2020 15:43:50 -0000 Subject: [Python-checkins] bpo-37999: No longer use __int__ in implicit integer conversions. (GH-15636) Message-ID: https://github.com/python/cpython/commit/578c3955e0222ec7b3146197467fbb0fcfae12fe commit: 578c3955e0222ec7b3146197467fbb0fcfae12fe branch: master author: Serhiy Storchaka committer: GitHub date: 2020-05-26T18:43:38+03:00 summary: bpo-37999: No longer use __int__ in implicit integer conversions. (GH-15636) Only __index__ should be used to make integer conversions lossless. 
files: A Misc/NEWS.d/next/Core and Builtins/2019-09-01-14-26-02.bpo-37999.XPl6dn.rst M Doc/whatsnew/3.10.rst M Include/longobject.h M Lib/ctypes/test/test_numbers.py M Lib/datetime.py M Lib/test/clinic.test M Lib/test/datetimetester.py M Lib/test/test_getargs2.py M Lib/test/test_grp.py M Lib/test/test_int.py M Lib/test/test_math.py M Lib/test/test_socket.py M Modules/_blake2/clinic/blake2b_impl.c.h M Modules/_blake2/clinic/blake2s_impl.c.h M Modules/_ctypes/cfield.c M Modules/_io/clinic/_iomodule.c.h M Modules/_io/clinic/bufferedio.c.h M Modules/_io/clinic/bytesio.c.h M Modules/_io/clinic/fileio.c.h M Modules/_io/clinic/iobase.c.h M Modules/_io/clinic/stringio.c.h M Modules/_io/clinic/textio.c.h M Modules/_io/clinic/winconsoleio.c.h M Modules/_io/fileio.c M Modules/_io/winconsoleio.c M Modules/_multiprocessing/clinic/posixshmem.c.h M Modules/arraymodule.c M Modules/cjkcodecs/clinic/multibytecodec.c.h M Modules/clinic/_bisectmodule.c.h M Modules/clinic/_bz2module.c.h M Modules/clinic/_codecsmodule.c.h M Modules/clinic/_collectionsmodule.c.h M Modules/clinic/_curses_panel.c.h M Modules/clinic/_cursesmodule.c.h M Modules/clinic/_datetimemodule.c.h M Modules/clinic/_dbmmodule.c.h M Modules/clinic/_elementtree.c.h M Modules/clinic/_gdbmmodule.c.h M Modules/clinic/_hashopenssl.c.h M Modules/clinic/_lzmamodule.c.h M Modules/clinic/_opcode.c.h M Modules/clinic/_operator.c.h M Modules/clinic/_randommodule.c.h M Modules/clinic/_sre.c.h M Modules/clinic/_ssl.c.h M Modules/clinic/_struct.c.h M Modules/clinic/_tkinter.c.h M Modules/clinic/_tracemalloc.c.h M Modules/clinic/arraymodule.c.h M Modules/clinic/audioop.c.h M Modules/clinic/binascii.c.h M Modules/clinic/fcntlmodule.c.h M Modules/clinic/gcmodule.c.h M Modules/clinic/itertoolsmodule.c.h M Modules/clinic/posixmodule.c.h M Modules/clinic/pyexpat.c.h M Modules/clinic/resource.c.h M Modules/clinic/selectmodule.c.h M Modules/clinic/signalmodule.c.h M Modules/clinic/zlibmodule.c.h M Modules/grpmodule.c M Modules/mathmodule.c M 
Modules/posixmodule.c M Modules/socketmodule.c M Modules/zlibmodule.c M Objects/abstract.c M Objects/clinic/bytearrayobject.c.h M Objects/clinic/bytesobject.c.h M Objects/clinic/codeobject.c.h M Objects/clinic/listobject.c.h M Objects/clinic/longobject.c.h M Objects/clinic/memoryobject.c.h M Objects/clinic/typeobject.c.h M Objects/clinic/unicodeobject.c.h M Objects/longobject.c M Objects/stringlib/clinic/transmogrify.h.h M PC/clinic/msvcrtmodule.c.h M PC/clinic/winreg.c.h M PC/clinic/winsound.c.h M Python/clinic/_warnings.c.h M Python/clinic/bltinmodule.c.h M Python/clinic/import.c.h M Python/clinic/marshal.c.h M Python/clinic/sysmodule.c.h M Python/clinic/traceback.c.h M Python/getargs.c M Tools/clinic/clinic.py diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst index 98a231f80aaf2..fabd9a2463e27 100644 --- a/Doc/whatsnew/3.10.rst +++ b/Doc/whatsnew/3.10.rst @@ -75,6 +75,12 @@ New Features Other Language Changes ====================== +* Builtin and extension functions that take integer arguments no longer accept + :class:`~decimal.Decimal`\ s, :class:`~fractions.Fraction`\ s and other + objects that can be converted to integers only with a loss (e.g. that have + the :meth:`~object.__int__` method but do not have the + :meth:`~object.__index__` method). + (Contributed by Serhiy Storchaka in :issue:`37999`.) New Modules diff --git a/Include/longobject.h b/Include/longobject.h index 1b288099da8c8..dad08c23f8211 100644 --- a/Include/longobject.h +++ b/Include/longobject.h @@ -173,23 +173,6 @@ PyAPI_FUNC(int) _PyLong_AsByteArray(PyLongObject* v, unsigned char* bytes, size_t n, int little_endian, int is_signed); -/* _PyLong_FromNbInt: Convert the given object to a PyLongObject - using the nb_int slot, if available. Raise TypeError if either the - nb_int slot is not available or the result of the call to nb_int - returns something not of type int. 
-*/ -PyAPI_FUNC(PyObject *) _PyLong_FromNbInt(PyObject *); - -/* Convert the given object to a PyLongObject using the nb_index or - nb_int slots, if available (the latter is deprecated). - Raise TypeError if either nb_index and nb_int slots are not - available or the result of the call to nb_index or nb_int - returns something not of type int. - Should be replaced with PyNumber_Index after the end of the - deprecation period. -*/ -PyAPI_FUNC(PyObject *) _PyLong_FromNbIndexOrNbInt(PyObject *); - /* _PyLong_Format: Convert the long to a string object with given base, appending a base prefix of 0[box] if base is 2, 8 or 16. */ PyAPI_FUNC(PyObject *) _PyLong_Format(PyObject *obj, int base); diff --git a/Lib/ctypes/test/test_numbers.py b/Lib/ctypes/test/test_numbers.py index c6d843b2cd987..db500e812beb1 100644 --- a/Lib/ctypes/test/test_numbers.py +++ b/Lib/ctypes/test/test_numbers.py @@ -134,8 +134,7 @@ def __index__(self): for t in signed_types + unsigned_types: self.assertRaises(TypeError, t, 3.14) self.assertRaises(TypeError, t, f) - with self.assertWarns(DeprecationWarning): - self.assertEqual(t(d).value, 2) + self.assertRaises(TypeError, t, d) self.assertEqual(t(i).value, 2) def test_sizes(self): diff --git a/Lib/datetime.py b/Lib/datetime.py index 952aebfdec0a7..3090978508c92 100644 --- a/Lib/datetime.py +++ b/Lib/datetime.py @@ -11,6 +11,7 @@ import time as _time import math as _math import sys +from operator import index as _index def _cmp(x, y): return 0 if x == y else 1 if x > y else -1 @@ -380,42 +381,10 @@ def _check_utc_offset(name, offset): "-timedelta(hours=24) and timedelta(hours=24)" % (name, offset)) -def _check_int_field(value): - if isinstance(value, int): - return value - if isinstance(value, float): - raise TypeError('integer argument expected, got float') - try: - value = value.__index__() - except AttributeError: - pass - else: - if not isinstance(value, int): - raise TypeError('__index__ returned non-int (type %s)' % - type(value).__name__) - 
return value - orig = value - try: - value = value.__int__() - except AttributeError: - pass - else: - if not isinstance(value, int): - raise TypeError('__int__ returned non-int (type %s)' % - type(value).__name__) - import warnings - warnings.warn("an integer is required (got type %s)" % - type(orig).__name__, - DeprecationWarning, - stacklevel=2) - return value - raise TypeError('an integer is required (got type %s)' % - type(value).__name__) - def _check_date_fields(year, month, day): - year = _check_int_field(year) - month = _check_int_field(month) - day = _check_int_field(day) + year = _index(year) + month = _index(month) + day = _index(day) if not MINYEAR <= year <= MAXYEAR: raise ValueError('year must be in %d..%d' % (MINYEAR, MAXYEAR), year) if not 1 <= month <= 12: @@ -426,10 +395,10 @@ def _check_date_fields(year, month, day): return year, month, day def _check_time_fields(hour, minute, second, microsecond, fold): - hour = _check_int_field(hour) - minute = _check_int_field(minute) - second = _check_int_field(second) - microsecond = _check_int_field(microsecond) + hour = _index(hour) + minute = _index(minute) + second = _index(second) + microsecond = _index(microsecond) if not 0 <= hour <= 23: raise ValueError('hour must be in 0..23', hour) if not 0 <= minute <= 59: @@ -2539,10 +2508,10 @@ def _name_from_offset(delta): # Clean up unused names del (_DAYNAMES, _DAYS_BEFORE_MONTH, _DAYS_IN_MONTH, _DI100Y, _DI400Y, _DI4Y, _EPOCH, _MAXORDINAL, _MONTHNAMES, _build_struct_time, - _check_date_fields, _check_int_field, _check_time_fields, + _check_date_fields, _check_time_fields, _check_tzinfo_arg, _check_tzname, _check_utc_offset, _cmp, _cmperror, _date_class, _days_before_month, _days_before_year, _days_in_month, - _format_time, _format_offset, _is_leap, _isoweek1monday, _math, + _format_time, _format_offset, _index, _is_leap, _isoweek1monday, _math, _ord2ymd, _time, _time_class, _tzinfo_class, _wrap_strftime, _ymd2ord, _divide_and_round, _parse_isoformat_date, 
_parse_isoformat_time, _parse_hh_mm_ss_ff, _IsoCalendarDate) diff --git a/Lib/test/clinic.test b/Lib/test/clinic.test index cb76c3746c307..5e6f129f0926e 100644 --- a/Lib/test/clinic.test +++ b/Lib/test/clinic.test @@ -418,11 +418,6 @@ test_bool_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 3) { goto skip_optional; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } c = _PyLong_AsInt(args[2]); if (c == -1 && PyErr_Occurred()) { goto exit; @@ -436,7 +431,7 @@ exit: static PyObject * test_bool_converter_impl(PyObject *module, int a, int b, int c) -/*[clinic end generated code: output=25f20963894256a1 input=939854fa9f248c60]*/ +/*[clinic end generated code: output=b5ec6409d942e0f9 input=939854fa9f248c60]*/ /*[clinic input] @@ -729,11 +724,6 @@ test_unsigned_char_converter(PyObject *module, PyObject *const *args, Py_ssize_t if (nargs < 1) { goto skip_optional; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { long ival = PyLong_AsLong(args[0]); if (ival == -1 && PyErr_Occurred()) { @@ -756,11 +746,6 @@ test_unsigned_char_converter(PyObject *module, PyObject *const *args, Py_ssize_t if (nargs < 2) { goto skip_optional; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { long ival = PyLong_AsLong(args[1]); if (ival == -1 && PyErr_Occurred()) { @@ -783,14 +768,9 @@ test_unsigned_char_converter(PyObject *module, PyObject *const *args, Py_ssize_t if (nargs < 3) { goto skip_optional; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { - long ival = PyLong_AsUnsignedLongMask(args[2]); - if (ival == -1 && PyErr_Occurred()) { + unsigned long ival = PyLong_AsUnsignedLongMask(args[2]); + if (ival == (unsigned long)-1 && 
PyErr_Occurred()) { goto exit; } else { @@ -807,7 +787,7 @@ exit: static PyObject * test_unsigned_char_converter_impl(PyObject *module, unsigned char a, unsigned char b, unsigned char c) -/*[clinic end generated code: output=ebf905c5c9414762 input=021414060993e289]*/ +/*[clinic end generated code: output=c0a6ab3144481466 input=021414060993e289]*/ /*[clinic input] @@ -841,11 +821,6 @@ test_short_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 1) { goto skip_optional; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { long ival = PyLong_AsLong(args[0]); if (ival == -1 && PyErr_Occurred()) { @@ -874,7 +849,7 @@ exit: static PyObject * test_short_converter_impl(PyObject *module, short a) -/*[clinic end generated code: output=86fe1a1496a7ff20 input=6a8a7a509a498ff4]*/ +/*[clinic end generated code: output=3ccda4bd08b6e4b4 input=6a8a7a509a498ff4]*/ /*[clinic input] @@ -925,11 +900,6 @@ test_unsigned_short_converter(PyObject *module, PyObject *const *args, Py_ssize_ if (nargs < 3) { goto skip_optional; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } c = (unsigned short)PyLong_AsUnsignedLongMask(args[2]); if (c == (unsigned short)-1 && PyErr_Occurred()) { goto exit; @@ -944,7 +914,7 @@ exit: static PyObject * test_unsigned_short_converter_impl(PyObject *module, unsigned short a, unsigned short b, unsigned short c) -/*[clinic end generated code: output=3779fe104319e3ae input=cdfd8eff3d9176b4]*/ +/*[clinic end generated code: output=576b5ce48424f351 input=cdfd8eff3d9176b4]*/ /*[clinic input] @@ -984,11 +954,6 @@ test_int_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 1) { goto skip_optional; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } a = _PyLong_AsInt(args[0]); if 
(a == -1 && PyErr_Occurred()) { goto exit; @@ -996,11 +961,6 @@ test_int_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 2) { goto skip_optional; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } b = _PyLong_AsInt(args[1]); if (b == -1 && PyErr_Occurred()) { goto exit; @@ -1023,11 +983,6 @@ test_int_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 4) { goto skip_optional; } - if (PyFloat_Check(args[3])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } d = _PyLong_AsInt(args[3]); if (d == -1 && PyErr_Occurred()) { goto exit; @@ -1041,7 +996,7 @@ exit: static PyObject * test_int_converter_impl(PyObject *module, int a, int b, int c, myenum d) -/*[clinic end generated code: output=10a2e48a34af5d7a input=d20541fc1ca0553e]*/ +/*[clinic end generated code: output=8a1a7b02ebe9eeac input=d20541fc1ca0553e]*/ /*[clinic input] @@ -1092,11 +1047,6 @@ test_unsigned_int_converter(PyObject *module, PyObject *const *args, Py_ssize_t if (nargs < 3) { goto skip_optional; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } c = (unsigned int)PyLong_AsUnsignedLongMask(args[2]); if (c == (unsigned int)-1 && PyErr_Occurred()) { goto exit; @@ -1111,7 +1061,7 @@ exit: static PyObject * test_unsigned_int_converter_impl(PyObject *module, unsigned int a, unsigned int b, unsigned int c) -/*[clinic end generated code: output=189176ce67c7d2e7 input=5533534828b62fc0]*/ +/*[clinic end generated code: output=4f53904bfa1a0250 input=5533534828b62fc0]*/ /*[clinic input] @@ -1145,11 +1095,6 @@ test_long_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 1) { goto skip_optional; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } a 
= PyLong_AsLong(args[0]); if (a == -1 && PyErr_Occurred()) { goto exit; @@ -1163,7 +1108,7 @@ exit: static PyObject * test_long_converter_impl(PyObject *module, long a) -/*[clinic end generated code: output=44cd8823f59d116b input=d2179e3c9cdcde89]*/ +/*[clinic end generated code: output=e5e7883fddcf4218 input=d2179e3c9cdcde89]*/ /*[clinic input] @@ -1263,11 +1208,6 @@ test_long_long_converter(PyObject *module, PyObject *const *args, Py_ssize_t nar if (nargs < 1) { goto skip_optional; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } a = PyLong_AsLongLong(args[0]); if (a == -1 && PyErr_Occurred()) { goto exit; @@ -1281,7 +1221,7 @@ exit: static PyObject * test_long_long_converter_impl(PyObject *module, long long a) -/*[clinic end generated code: output=7143b585d7e433e8 input=d5fc81577ff4dd02]*/ +/*[clinic end generated code: output=0488ac9e8c1d77bb input=d5fc81577ff4dd02]*/ /*[clinic input] @@ -1390,11 +1330,6 @@ test_Py_ssize_t_converter(PyObject *module, PyObject *const *args, Py_ssize_t na if (nargs < 1) { goto skip_optional; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[0]); @@ -1410,11 +1345,6 @@ test_Py_ssize_t_converter(PyObject *module, PyObject *const *args, Py_ssize_t na if (nargs < 2) { goto skip_optional; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[1]); @@ -1443,7 +1373,7 @@ exit: static PyObject * test_Py_ssize_t_converter_impl(PyObject *module, Py_ssize_t a, Py_ssize_t b, Py_ssize_t c) -/*[clinic end generated code: output=a46d2aaf40c10398 input=3855f184bb3f299d]*/ +/*[clinic end generated code: output=ea781bb7169b3436 input=3855f184bb3f299d]*/ /*[clinic input] diff --git 
a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py index a9741d6d4062f..520a51df87999 100644 --- a/Lib/test/datetimetester.py +++ b/Lib/test/datetimetester.py @@ -5107,43 +5107,21 @@ def __init__(self, value): def __int__(self): return self.value - for xx in [decimal.Decimal(10), - decimal.Decimal('10.9'), - Number(10)]: - with self.assertWarns(DeprecationWarning): - self.assertEqual(datetime(10, 10, 10, 10, 10, 10, 10), - datetime(xx, xx, xx, xx, xx, xx, xx)) - - with self.assertRaisesRegex(TypeError, '^an integer is required ' - r'\(got type str\)$'): - datetime(10, 10, '10') - - f10 = Number(10.9) - with self.assertRaisesRegex(TypeError, '^__int__ returned non-int ' - r'\(type float\)$'): - datetime(10, 10, f10) - class Float(float): pass - s10 = Float(10.9) - with self.assertRaisesRegex(TypeError, '^integer argument expected, ' - 'got float$'): - datetime(10, 10, s10) - with self.assertRaises(TypeError): - datetime(10., 10, 10) - with self.assertRaises(TypeError): - datetime(10, 10., 10) - with self.assertRaises(TypeError): - datetime(10, 10, 10.) - with self.assertRaises(TypeError): - datetime(10, 10, 10, 10.) - with self.assertRaises(TypeError): - datetime(10, 10, 10, 10, 10.) - with self.assertRaises(TypeError): - datetime(10, 10, 10, 10, 10, 10.) - with self.assertRaises(TypeError): - datetime(10, 10, 10, 10, 10, 10, 10.) 
+ for xx in [10.0, Float(10.9), + decimal.Decimal(10), decimal.Decimal('10.9'), + Number(10), Number(10.9), + '10']: + self.assertRaises(TypeError, datetime, xx, 10, 10, 10, 10, 10, 10) + self.assertRaises(TypeError, datetime, 10, xx, 10, 10, 10, 10, 10) + self.assertRaises(TypeError, datetime, 10, 10, xx, 10, 10, 10, 10) + self.assertRaises(TypeError, datetime, 10, 10, 10, xx, 10, 10, 10) + self.assertRaises(TypeError, datetime, 10, 10, 10, 10, xx, 10, 10) + self.assertRaises(TypeError, datetime, 10, 10, 10, 10, 10, xx, 10) + self.assertRaises(TypeError, datetime, 10, 10, 10, 10, 10, 10, xx) + ############################################################################# # Local Time Disambiguation diff --git a/Lib/test/test_getargs2.py b/Lib/test/test_getargs2.py index 1a73fa4615806..0dec5b1874e6d 100644 --- a/Lib/test/test_getargs2.py +++ b/Lib/test/test_getargs2.py @@ -161,12 +161,10 @@ def test_b(self): with self.assertWarns(DeprecationWarning): self.assertEqual(1, getargs_b(BadIndex2())) self.assertEqual(0, getargs_b(BadIndex3())) - with self.assertWarns(DeprecationWarning): - self.assertEqual(99, getargs_b(Int())) + self.assertRaises(TypeError, getargs_b, Int()) self.assertEqual(0, getargs_b(IntSubclass())) self.assertRaises(TypeError, getargs_b, BadInt()) - with self.assertWarns(DeprecationWarning): - self.assertEqual(1, getargs_b(BadInt2())) + self.assertRaises(TypeError, getargs_b, BadInt2()) self.assertEqual(0, getargs_b(BadInt3())) self.assertRaises(OverflowError, getargs_b, -1) @@ -187,12 +185,10 @@ def test_B(self): with self.assertWarns(DeprecationWarning): self.assertEqual(1, getargs_B(BadIndex2())) self.assertEqual(0, getargs_B(BadIndex3())) - with self.assertWarns(DeprecationWarning): - self.assertEqual(99, getargs_B(Int())) + self.assertRaises(TypeError, getargs_B, Int()) self.assertEqual(0, getargs_B(IntSubclass())) self.assertRaises(TypeError, getargs_B, BadInt()) - with self.assertWarns(DeprecationWarning): - self.assertEqual(1, 
getargs_B(BadInt2())) + self.assertRaises(TypeError, getargs_B, BadInt2()) self.assertEqual(0, getargs_B(BadInt3())) self.assertEqual(UCHAR_MAX, getargs_B(-1)) @@ -213,12 +209,10 @@ def test_H(self): with self.assertWarns(DeprecationWarning): self.assertEqual(1, getargs_H(BadIndex2())) self.assertEqual(0, getargs_H(BadIndex3())) - with self.assertWarns(DeprecationWarning): - self.assertEqual(99, getargs_H(Int())) + self.assertRaises(TypeError, getargs_H, Int()) self.assertEqual(0, getargs_H(IntSubclass())) self.assertRaises(TypeError, getargs_H, BadInt()) - with self.assertWarns(DeprecationWarning): - self.assertEqual(1, getargs_H(BadInt2())) + self.assertRaises(TypeError, getargs_H, BadInt2()) self.assertEqual(0, getargs_H(BadInt3())) self.assertEqual(USHRT_MAX, getargs_H(-1)) @@ -240,12 +234,10 @@ def test_I(self): with self.assertWarns(DeprecationWarning): self.assertEqual(1, getargs_I(BadIndex2())) self.assertEqual(0, getargs_I(BadIndex3())) - with self.assertWarns(DeprecationWarning): - self.assertEqual(99, getargs_I(Int())) + self.assertRaises(TypeError, getargs_I, Int()) self.assertEqual(0, getargs_I(IntSubclass())) self.assertRaises(TypeError, getargs_I, BadInt()) - with self.assertWarns(DeprecationWarning): - self.assertEqual(1, getargs_I(BadInt2())) + self.assertRaises(TypeError, getargs_I, BadInt2()) self.assertEqual(0, getargs_I(BadInt3())) self.assertEqual(UINT_MAX, getargs_I(-1)) @@ -293,12 +285,10 @@ def test_h(self): with self.assertWarns(DeprecationWarning): self.assertEqual(1, getargs_h(BadIndex2())) self.assertEqual(0, getargs_h(BadIndex3())) - with self.assertWarns(DeprecationWarning): - self.assertEqual(99, getargs_h(Int())) + self.assertRaises(TypeError, getargs_h, Int()) self.assertEqual(0, getargs_h(IntSubclass())) self.assertRaises(TypeError, getargs_h, BadInt()) - with self.assertWarns(DeprecationWarning): - self.assertEqual(1, getargs_h(BadInt2())) + self.assertRaises(TypeError, getargs_h, BadInt2()) self.assertEqual(0, 
getargs_h(BadInt3())) self.assertRaises(OverflowError, getargs_h, SHRT_MIN-1) @@ -319,12 +309,10 @@ def test_i(self): with self.assertWarns(DeprecationWarning): self.assertEqual(1, getargs_i(BadIndex2())) self.assertEqual(0, getargs_i(BadIndex3())) - with self.assertWarns(DeprecationWarning): - self.assertEqual(99, getargs_i(Int())) + self.assertRaises(TypeError, getargs_i, Int()) self.assertEqual(0, getargs_i(IntSubclass())) self.assertRaises(TypeError, getargs_i, BadInt()) - with self.assertWarns(DeprecationWarning): - self.assertEqual(1, getargs_i(BadInt2())) + self.assertRaises(TypeError, getargs_i, BadInt2()) self.assertEqual(0, getargs_i(BadInt3())) self.assertRaises(OverflowError, getargs_i, INT_MIN-1) @@ -345,12 +333,10 @@ def test_l(self): with self.assertWarns(DeprecationWarning): self.assertEqual(1, getargs_l(BadIndex2())) self.assertEqual(0, getargs_l(BadIndex3())) - with self.assertWarns(DeprecationWarning): - self.assertEqual(99, getargs_l(Int())) + self.assertRaises(TypeError, getargs_l, Int()) self.assertEqual(0, getargs_l(IntSubclass())) self.assertRaises(TypeError, getargs_l, BadInt()) - with self.assertWarns(DeprecationWarning): - self.assertEqual(1, getargs_l(BadInt2())) + self.assertRaises(TypeError, getargs_l, BadInt2()) self.assertEqual(0, getargs_l(BadInt3())) self.assertRaises(OverflowError, getargs_l, LONG_MIN-1) @@ -400,12 +386,10 @@ def test_L(self): with self.assertWarns(DeprecationWarning): self.assertEqual(1, getargs_L(BadIndex2())) self.assertEqual(0, getargs_L(BadIndex3())) - with self.assertWarns(DeprecationWarning): - self.assertEqual(99, getargs_L(Int())) + self.assertRaises(TypeError, getargs_L, Int()) self.assertEqual(0, getargs_L(IntSubclass())) self.assertRaises(TypeError, getargs_L, BadInt()) - with self.assertWarns(DeprecationWarning): - self.assertEqual(1, getargs_L(BadInt2())) + self.assertRaises(TypeError, getargs_L, BadInt2()) self.assertEqual(0, getargs_L(BadInt3())) self.assertRaises(OverflowError, getargs_L, 
LLONG_MIN-1) diff --git a/Lib/test/test_grp.py b/Lib/test/test_grp.py index e511947858c0a..0993f091f5956 100644 --- a/Lib/test/test_grp.py +++ b/Lib/test/test_grp.py @@ -100,8 +100,8 @@ def test_noninteger_gid(self): self.skipTest('no groups') # Choose an existent gid. gid = entries[0][2] - self.assertWarns(DeprecationWarning, grp.getgrgid, float(gid)) - self.assertWarns(DeprecationWarning, grp.getgrgid, str(gid)) + self.assertRaises(TypeError, grp.getgrgid, float(gid)) + self.assertRaises(TypeError, grp.getgrgid, str(gid)) if __name__ == "__main__": diff --git a/Lib/test/test_int.py b/Lib/test/test_int.py index 6fdf52ef23f65..d6be64e7c18a0 100644 --- a/Lib/test/test_int.py +++ b/Lib/test/test_int.py @@ -517,10 +517,7 @@ def __trunc__(self): self.assertIs(type(n), int) bad_int = TruncReturnsBadInt() - with self.assertWarns(DeprecationWarning): - n = int(bad_int) - self.assertEqual(n, 1) - self.assertIs(type(n), int) + self.assertRaises(TypeError, int, bad_int) good_int = TruncReturnsIntSubclass() n = int(good_int) diff --git a/Lib/test/test_math.py b/Lib/test/test_math.py index 4b848a5e7e5f8..e06b1e6a5b9b7 100644 --- a/Lib/test/test_math.py +++ b/Lib/test/test_math.py @@ -502,14 +502,10 @@ def testFactorial(self): self.assertRaises(ValueError, math.factorial, -10**100) def testFactorialNonIntegers(self): - with self.assertWarns(DeprecationWarning): - self.assertEqual(math.factorial(5.0), 120) - with self.assertWarns(DeprecationWarning): - self.assertRaises(ValueError, math.factorial, 5.2) - with self.assertWarns(DeprecationWarning): - self.assertRaises(ValueError, math.factorial, -1.0) - with self.assertWarns(DeprecationWarning): - self.assertRaises(ValueError, math.factorial, -1e100) + self.assertRaises(TypeError, math.factorial, 5.0) + self.assertRaises(TypeError, math.factorial, 5.2) + self.assertRaises(TypeError, math.factorial, -1.0) + self.assertRaises(TypeError, math.factorial, -1e100) self.assertRaises(TypeError, math.factorial, decimal.Decimal('5')) 
self.assertRaises(TypeError, math.factorial, decimal.Decimal('5.2')) self.assertRaises(TypeError, math.factorial, "5") @@ -520,8 +516,7 @@ def testFactorialHugeInputs(self): # Currently raises OverflowError for inputs that are too large # to fit into a C long. self.assertRaises(OverflowError, math.factorial, 10**100) - with self.assertWarns(DeprecationWarning): - self.assertRaises(OverflowError, math.factorial, 1e100) + self.assertRaises(TypeError, math.factorial, 1e100) def testFloor(self): self.assertRaises(TypeError, math.floor) diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py index aefba4f397b10..dc1330735df10 100755 --- a/Lib/test/test_socket.py +++ b/Lib/test/test_socket.py @@ -917,10 +917,8 @@ def testSendtoErrors(self): self.assertIn('not NoneType', str(cm.exception)) with self.assertRaises(TypeError) as cm: s.sendto(b'foo', 'bar', sockname) - self.assertIn('an integer is required', str(cm.exception)) with self.assertRaises(TypeError) as cm: s.sendto(b'foo', None, None) - self.assertIn('an integer is required', str(cm.exception)) # wrong number of args with self.assertRaises(TypeError) as cm: s.sendto(b'foo') @@ -1899,11 +1897,11 @@ def test_socket_fileno(self): socket.SOCK_STREAM) def test_socket_fileno_rejects_float(self): - with self.assertRaisesRegex(TypeError, "integer argument expected"): + with self.assertRaises(TypeError): socket.socket(socket.AF_INET, socket.SOCK_STREAM, fileno=42.5) def test_socket_fileno_rejects_other_types(self): - with self.assertRaisesRegex(TypeError, "integer is required"): + with self.assertRaises(TypeError): socket.socket(socket.AF_INET, socket.SOCK_STREAM, fileno="foo") def test_socket_fileno_rejects_invalid_socket(self): diff --git a/Misc/NEWS.d/next/Core and Builtins/2019-09-01-14-26-02.bpo-37999.XPl6dn.rst b/Misc/NEWS.d/next/Core and Builtins/2019-09-01-14-26-02.bpo-37999.XPl6dn.rst new file mode 100644 index 0000000000000..8d7e9369af4c6 --- /dev/null +++ b/Misc/NEWS.d/next/Core and 
Builtins/2019-09-01-14-26-02.bpo-37999.XPl6dn.rst @@ -0,0 +1,5 @@ +Builtin and extension functions that take integer arguments no longer accept +:class:`~decimal.Decimal`\ s, :class:`~fractions.Fraction`\ s and other +objects that can be converted to integers only with a loss (e.g. that have +the :meth:`~object.__int__` method but do not have the +:meth:`~object.__index__` method). diff --git a/Modules/_blake2/clinic/blake2b_impl.c.h b/Modules/_blake2/clinic/blake2b_impl.c.h index 07258c31c9bff..4e74e0885cf23 100644 --- a/Modules/_blake2/clinic/blake2b_impl.c.h +++ b/Modules/_blake2/clinic/blake2b_impl.c.h @@ -56,11 +56,6 @@ py_blake2b_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) goto skip_optional_kwonly; } if (fastargs[1]) { - if (PyFloat_Check(fastargs[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } digest_size = _PyLong_AsInt(fastargs[1]); if (digest_size == -1 && PyErr_Occurred()) { goto exit; @@ -106,11 +101,6 @@ py_blake2b_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) } } if (fastargs[5]) { - if (PyFloat_Check(fastargs[5])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fanout = _PyLong_AsInt(fastargs[5]); if (fanout == -1 && PyErr_Occurred()) { goto exit; @@ -120,11 +110,6 @@ py_blake2b_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) } } if (fastargs[6]) { - if (PyFloat_Check(fastargs[6])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } depth = _PyLong_AsInt(fastargs[6]); if (depth == -1 && PyErr_Occurred()) { goto exit; @@ -150,11 +135,6 @@ py_blake2b_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) } } if (fastargs[9]) { - if (PyFloat_Check(fastargs[9])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } node_depth = _PyLong_AsInt(fastargs[9]); if (node_depth == -1 && PyErr_Occurred()) { goto exit; @@ -164,11 
+144,6 @@ py_blake2b_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) } } if (fastargs[10]) { - if (PyFloat_Check(fastargs[10])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } inner_size = _PyLong_AsInt(fastargs[10]); if (inner_size == -1 && PyErr_Occurred()) { goto exit; @@ -272,4 +247,4 @@ _blake2_blake2b_hexdigest(BLAKE2bObject *self, PyObject *Py_UNUSED(ignored)) { return _blake2_blake2b_hexdigest_impl(self); } -/*[clinic end generated code: output=2d6d0fe9aa42a42a input=a9049054013a1b77]*/ +/*[clinic end generated code: output=10eb47aba77f192d input=a9049054013a1b77]*/ diff --git a/Modules/_blake2/clinic/blake2s_impl.c.h b/Modules/_blake2/clinic/blake2s_impl.c.h index 71c5706fb660b..0f0d9835fbfe2 100644 --- a/Modules/_blake2/clinic/blake2s_impl.c.h +++ b/Modules/_blake2/clinic/blake2s_impl.c.h @@ -56,11 +56,6 @@ py_blake2s_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) goto skip_optional_kwonly; } if (fastargs[1]) { - if (PyFloat_Check(fastargs[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } digest_size = _PyLong_AsInt(fastargs[1]); if (digest_size == -1 && PyErr_Occurred()) { goto exit; @@ -106,11 +101,6 @@ py_blake2s_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) } } if (fastargs[5]) { - if (PyFloat_Check(fastargs[5])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fanout = _PyLong_AsInt(fastargs[5]); if (fanout == -1 && PyErr_Occurred()) { goto exit; @@ -120,11 +110,6 @@ py_blake2s_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) } } if (fastargs[6]) { - if (PyFloat_Check(fastargs[6])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } depth = _PyLong_AsInt(fastargs[6]); if (depth == -1 && PyErr_Occurred()) { goto exit; @@ -150,11 +135,6 @@ py_blake2s_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) } } if 
(fastargs[9]) { - if (PyFloat_Check(fastargs[9])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } node_depth = _PyLong_AsInt(fastargs[9]); if (node_depth == -1 && PyErr_Occurred()) { goto exit; @@ -164,11 +144,6 @@ py_blake2s_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) } } if (fastargs[10]) { - if (PyFloat_Check(fastargs[10])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } inner_size = _PyLong_AsInt(fastargs[10]); if (inner_size == -1 && PyErr_Occurred()) { goto exit; @@ -272,4 +247,4 @@ _blake2_blake2s_hexdigest(BLAKE2sObject *self, PyObject *Py_UNUSED(ignored)) { return _blake2_blake2s_hexdigest_impl(self); } -/*[clinic end generated code: output=c80d8d06ce40a192 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=f7ee8092ed67e9c7 input=a9049054013a1b77]*/ diff --git a/Modules/_ctypes/cfield.c b/Modules/_ctypes/cfield.c index a72682d7292ca..7f853190a785e 100644 --- a/Modules/_ctypes/cfield.c +++ b/Modules/_ctypes/cfield.c @@ -354,14 +354,7 @@ PyTypeObject PyCField_Type = { static int get_long(PyObject *v, long *p) { - long x; - - if (PyFloat_Check(v)) { - PyErr_SetString(PyExc_TypeError, - "int expected instead of float"); - return -1; - } - x = PyLong_AsUnsignedLongMask(v); + long x = PyLong_AsUnsignedLongMask(v); if (x == -1 && PyErr_Occurred()) return -1; *p = x; @@ -373,14 +366,7 @@ get_long(PyObject *v, long *p) static int get_ulong(PyObject *v, unsigned long *p) { - unsigned long x; - - if (PyFloat_Check(v)) { - PyErr_SetString(PyExc_TypeError, - "int expected instead of float"); - return -1; - } - x = PyLong_AsUnsignedLongMask(v); + unsigned long x = PyLong_AsUnsignedLongMask(v); if (x == (unsigned long)-1 && PyErr_Occurred()) return -1; *p = x; @@ -392,13 +378,7 @@ get_ulong(PyObject *v, unsigned long *p) static int get_longlong(PyObject *v, long long *p) { - long long x; - if (PyFloat_Check(v)) { - 
PyErr_SetString(PyExc_TypeError, - "int expected instead of float"); - return -1; - } - x = PyLong_AsUnsignedLongLongMask(v); + long long x = PyLong_AsUnsignedLongLongMask(v); if (x == -1 && PyErr_Occurred()) return -1; *p = x; @@ -410,13 +390,7 @@ get_longlong(PyObject *v, long long *p) static int get_ulonglong(PyObject *v, unsigned long long *p) { - unsigned long long x; - if (PyFloat_Check(v)) { - PyErr_SetString(PyExc_TypeError, - "int expected instead of float"); - return -1; - } - x = PyLong_AsUnsignedLongLongMask(v); + unsigned long long x = PyLong_AsUnsignedLongLongMask(v); if (x == (unsigned long long)-1 && PyErr_Occurred()) return -1; *p = x; diff --git a/Modules/_io/clinic/_iomodule.c.h b/Modules/_io/clinic/_iomodule.c.h index 1a9651d340813..dc7b5ff243a78 100644 --- a/Modules/_io/clinic/_iomodule.c.h +++ b/Modules/_io/clinic/_iomodule.c.h @@ -178,11 +178,6 @@ _io_open(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kw } } if (args[2]) { - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } buffering = _PyLong_AsInt(args[2]); if (buffering == -1 && PyErr_Occurred()) { goto exit; @@ -261,11 +256,6 @@ _io_open(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kw } } if (args[6]) { - if (PyFloat_Check(args[6])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } closefd = _PyLong_AsInt(args[6]); if (closefd == -1 && PyErr_Occurred()) { goto exit; @@ -323,4 +313,4 @@ _io_open_code(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObjec exit: return return_value; } -/*[clinic end generated code: output=3df6bc6d91697545 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=5c0dd7a262c30ebc input=a9049054013a1b77]*/ diff --git a/Modules/_io/clinic/bufferedio.c.h b/Modules/_io/clinic/bufferedio.c.h index 56d6332a25058..1961ed94c5535 100644 --- a/Modules/_io/clinic/bufferedio.c.h +++ 
b/Modules/_io/clinic/bufferedio.c.h @@ -120,11 +120,6 @@ _io__Buffered_peek(buffered *self, PyObject *const *args, Py_ssize_t nargs) if (nargs < 1) { goto skip_optional; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[0]); @@ -200,11 +195,6 @@ _io__Buffered_read1(buffered *self, PyObject *const *args, Py_ssize_t nargs) if (nargs < 1) { goto skip_optional; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[0]); @@ -356,11 +346,6 @@ _io__Buffered_seek(buffered *self, PyObject *const *args, Py_ssize_t nargs) if (nargs < 2) { goto skip_optional; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } whence = _PyLong_AsInt(args[1]); if (whence == -1 && PyErr_Occurred()) { goto exit; @@ -434,11 +419,6 @@ _io_BufferedReader___init__(PyObject *self, PyObject *args, PyObject *kwargs) if (!noptargs) { goto skip_optional_pos; } - if (PyFloat_Check(fastargs[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(fastargs[1]); @@ -493,11 +473,6 @@ _io_BufferedWriter___init__(PyObject *self, PyObject *args, PyObject *kwargs) if (!noptargs) { goto skip_optional_pos; } - if (PyFloat_Check(fastargs[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(fastargs[1]); @@ -590,11 +565,6 @@ _io_BufferedRWPair___init__(PyObject *self, PyObject *args, PyObject *kwargs) if (PyTuple_GET_SIZE(args) < 3) { goto skip_optional; } - if (PyFloat_Check(PyTuple_GET_ITEM(args, 2))) { - PyErr_SetString(PyExc_TypeError, - "integer argument 
expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(PyTuple_GET_ITEM(args, 2)); @@ -649,11 +619,6 @@ _io_BufferedRandom___init__(PyObject *self, PyObject *args, PyObject *kwargs) if (!noptargs) { goto skip_optional_pos; } - if (PyFloat_Check(fastargs[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(fastargs[1]); @@ -672,4 +637,4 @@ _io_BufferedRandom___init__(PyObject *self, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=7d9ad40c95bdd808 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=1882bb497ddc9375 input=a9049054013a1b77]*/ diff --git a/Modules/_io/clinic/bytesio.c.h b/Modules/_io/clinic/bytesio.c.h index 83cd490dc5980..4720bdd655586 100644 --- a/Modules/_io/clinic/bytesio.c.h +++ b/Modules/_io/clinic/bytesio.c.h @@ -402,11 +402,6 @@ _io_BytesIO_seek(bytesio *self, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("seek", nargs, 1, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[0]); @@ -422,11 +417,6 @@ _io_BytesIO_seek(bytesio *self, PyObject *const *args, Py_ssize_t nargs) if (nargs < 2) { goto skip_optional; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } whence = _PyLong_AsInt(args[1]); if (whence == -1 && PyErr_Occurred()) { goto exit; @@ -515,4 +505,4 @@ _io_BytesIO___init__(PyObject *self, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=4ec2506def9c8eb9 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=ba0f302f16397741 input=a9049054013a1b77]*/ diff --git a/Modules/_io/clinic/fileio.c.h 
b/Modules/_io/clinic/fileio.c.h index 53e7067cf7a74..9b237c156d373 100644 --- a/Modules/_io/clinic/fileio.c.h +++ b/Modules/_io/clinic/fileio.c.h @@ -87,11 +87,6 @@ _io_FileIO___init__(PyObject *self, PyObject *args, PyObject *kwargs) } } if (fastargs[2]) { - if (PyFloat_Check(fastargs[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } closefd = _PyLong_AsInt(fastargs[2]); if (closefd == -1 && PyErr_Occurred()) { goto exit; @@ -351,11 +346,6 @@ _io_FileIO_seek(fileio *self, PyObject *const *args, Py_ssize_t nargs) if (nargs < 2) { goto skip_optional; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } whence = _PyLong_AsInt(args[1]); if (whence == -1 && PyErr_Occurred()) { goto exit; @@ -447,4 +437,4 @@ _io_FileIO_isatty(fileio *self, PyObject *Py_UNUSED(ignored)) #ifndef _IO_FILEIO_TRUNCATE_METHODDEF #define _IO_FILEIO_TRUNCATE_METHODDEF #endif /* !defined(_IO_FILEIO_TRUNCATE_METHODDEF) */ -/*[clinic end generated code: output=e7682d0a3264d284 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=3479912ec0f7e029 input=a9049054013a1b77]*/ diff --git a/Modules/_io/clinic/iobase.c.h b/Modules/_io/clinic/iobase.c.h index ddaff7b5d135d..02a2ab86c4e37 100644 --- a/Modules/_io/clinic/iobase.c.h +++ b/Modules/_io/clinic/iobase.c.h @@ -274,11 +274,6 @@ _io__RawIOBase_read(PyObject *self, PyObject *const *args, Py_ssize_t nargs) if (nargs < 1) { goto skip_optional; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[0]); @@ -315,4 +310,4 @@ _io__RawIOBase_readall(PyObject *self, PyObject *Py_UNUSED(ignored)) { return _io__RawIOBase_readall_impl(self); } -/*[clinic end generated code: output=61b6ea7153ef9940 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=1f9ce590549593be 
input=a9049054013a1b77]*/ diff --git a/Modules/_io/clinic/stringio.c.h b/Modules/_io/clinic/stringio.c.h index 77a720c2a6ff1..2b32319f985fc 100644 --- a/Modules/_io/clinic/stringio.c.h +++ b/Modules/_io/clinic/stringio.c.h @@ -177,11 +177,6 @@ _io_StringIO_seek(stringio *self, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("seek", nargs, 1, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[0]); @@ -197,11 +192,6 @@ _io_StringIO_seek(stringio *self, PyObject *const *args, Py_ssize_t nargs) if (nargs < 2) { goto skip_optional; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } whence = _PyLong_AsInt(args[1]); if (whence == -1 && PyErr_Occurred()) { goto exit; @@ -348,4 +338,4 @@ _io_StringIO_seekable(stringio *self, PyObject *Py_UNUSED(ignored)) { return _io_StringIO_seekable_impl(self); } -/*[clinic end generated code: output=7aad5ab2e64a25b8 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=9c428b2942d54991 input=a9049054013a1b77]*/ diff --git a/Modules/_io/clinic/textio.c.h b/Modules/_io/clinic/textio.c.h index b8b507543ea81..f0ad69ce33238 100644 --- a/Modules/_io/clinic/textio.c.h +++ b/Modules/_io/clinic/textio.c.h @@ -39,11 +39,6 @@ _io_IncrementalNewlineDecoder___init__(PyObject *self, PyObject *args, PyObject goto exit; } decoder = fastargs[0]; - if (PyFloat_Check(fastargs[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } translate = _PyLong_AsInt(fastargs[1]); if (translate == -1 && PyErr_Occurred()) { goto exit; @@ -90,11 +85,6 @@ _io_IncrementalNewlineDecoder_decode(nldecoder_object *self, PyObject *const *ar if (!noptargs) { goto skip_optional_pos; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument 
expected, got float" ); - goto exit; - } final = _PyLong_AsInt(args[1]); if (final == -1 && PyErr_Occurred()) { goto exit; @@ -266,11 +256,6 @@ _io_TextIOWrapper___init__(PyObject *self, PyObject *args, PyObject *kwargs) } } if (fastargs[4]) { - if (PyFloat_Check(fastargs[4])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } line_buffering = _PyLong_AsInt(fastargs[4]); if (line_buffering == -1 && PyErr_Occurred()) { goto exit; @@ -279,11 +264,6 @@ _io_TextIOWrapper___init__(PyObject *self, PyObject *args, PyObject *kwargs) goto skip_optional_pos; } } - if (PyFloat_Check(fastargs[5])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } write_through = _PyLong_AsInt(fastargs[5]); if (write_through == -1 && PyErr_Occurred()) { goto exit; @@ -470,11 +450,6 @@ _io_TextIOWrapper_readline(textio *self, PyObject *const *args, Py_ssize_t nargs if (nargs < 1) { goto skip_optional; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[0]); @@ -519,11 +494,6 @@ _io_TextIOWrapper_seek(textio *self, PyObject *const *args, Py_ssize_t nargs) if (nargs < 2) { goto skip_optional; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } whence = _PyLong_AsInt(args[1]); if (whence == -1 && PyErr_Occurred()) { goto exit; @@ -701,4 +671,4 @@ _io_TextIOWrapper_close(textio *self, PyObject *Py_UNUSED(ignored)) { return _io_TextIOWrapper_close_impl(self); } -/*[clinic end generated code: output=b1bae4f4cdf6019e input=a9049054013a1b77]*/ +/*[clinic end generated code: output=ea96ee1eb3a71f77 input=a9049054013a1b77]*/ diff --git a/Modules/_io/clinic/winconsoleio.c.h b/Modules/_io/clinic/winconsoleio.c.h index 3e501a5853716..cf6ce603ff3fc 100644 --- a/Modules/_io/clinic/winconsoleio.c.h +++ 
b/Modules/_io/clinic/winconsoleio.c.h @@ -86,11 +86,6 @@ _io__WindowsConsoleIO___init__(PyObject *self, PyObject *args, PyObject *kwargs) } } if (fastargs[2]) { - if (PyFloat_Check(fastargs[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } closefd = _PyLong_AsInt(fastargs[2]); if (closefd == -1 && PyErr_Occurred()) { goto exit; @@ -386,4 +381,4 @@ _io__WindowsConsoleIO_isatty(winconsoleio *self, PyObject *Py_UNUSED(ignored)) #ifndef _IO__WINDOWSCONSOLEIO_ISATTY_METHODDEF #define _IO__WINDOWSCONSOLEIO_ISATTY_METHODDEF #endif /* !defined(_IO__WINDOWSCONSOLEIO_ISATTY_METHODDEF) */ -/*[clinic end generated code: output=f5b8860a658a001a input=a9049054013a1b77]*/ +/*[clinic end generated code: output=a28b3120fa53b256 input=a9049054013a1b77]*/ diff --git a/Modules/_io/fileio.c b/Modules/_io/fileio.c index caf91dfdb749e..7c8ba37c4fe94 100644 --- a/Modules/_io/fileio.c +++ b/Modules/_io/fileio.c @@ -255,12 +255,6 @@ _io_FileIO___init___impl(fileio *self, PyObject *nameobj, const char *mode, self->fd = -1; } - if (PyFloat_Check(nameobj)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float"); - return -1; - } - fd = _PyLong_AsInt(nameobj); if (fd < 0) { if (!PyErr_Occurred()) { @@ -895,10 +889,6 @@ portable_lseek(fileio *self, PyObject *posobj, int whence, bool suppress_pipe_er pos = 0; } else { - if(PyFloat_Check(posobj)) { - PyErr_SetString(PyExc_TypeError, "an integer is required"); - return NULL; - } #if defined(HAVE_LARGEFILE_SUPPORT) pos = PyLong_AsLongLong(posobj); #else diff --git a/Modules/_io/winconsoleio.c b/Modules/_io/winconsoleio.c index a83ef37a1fcf7..4ccf0273403a1 100644 --- a/Modules/_io/winconsoleio.c +++ b/Modules/_io/winconsoleio.c @@ -281,12 +281,6 @@ _io__WindowsConsoleIO___init___impl(winconsoleio *self, PyObject *nameobj, self->handle = INVALID_HANDLE_VALUE; } - if (PyFloat_Check(nameobj)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float"); - 
return -1; - } - fd = _PyLong_AsInt(nameobj); if (fd < 0) { if (!PyErr_Occurred()) { diff --git a/Modules/_multiprocessing/clinic/posixshmem.c.h b/Modules/_multiprocessing/clinic/posixshmem.c.h index a99f0d2aae1dc..3424b10a569f8 100644 --- a/Modules/_multiprocessing/clinic/posixshmem.c.h +++ b/Modules/_multiprocessing/clinic/posixshmem.c.h @@ -42,11 +42,6 @@ _posixshmem_shm_open(PyObject *module, PyObject *const *args, Py_ssize_t nargs, goto exit; } path = args[0]; - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } flags = _PyLong_AsInt(args[1]); if (flags == -1 && PyErr_Occurred()) { goto exit; @@ -54,11 +49,6 @@ _posixshmem_shm_open(PyObject *module, PyObject *const *args, Py_ssize_t nargs, if (!noptargs) { goto skip_optional_pos; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } mode = _PyLong_AsInt(args[2]); if (mode == -1 && PyErr_Occurred()) { goto exit; @@ -130,4 +120,4 @@ _posixshmem_shm_unlink(PyObject *module, PyObject *const *args, Py_ssize_t nargs #ifndef _POSIXSHMEM_SHM_UNLINK_METHODDEF #define _POSIXSHMEM_SHM_UNLINK_METHODDEF #endif /* !defined(_POSIXSHMEM_SHM_UNLINK_METHODDEF) */ -/*[clinic end generated code: output=9132861c61d8c2d8 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=bca8e78d0f43ef1a input=a9049054013a1b77]*/ diff --git a/Modules/arraymodule.c b/Modules/arraymodule.c index 732703e481adc..fb1b82cd6a6ec 100644 --- a/Modules/arraymodule.c +++ b/Modules/arraymodule.c @@ -337,17 +337,6 @@ II_getitem(arrayobject *ap, Py_ssize_t i) (unsigned long) ((unsigned int *)ap->ob_item)[i]); } -static PyObject * -get_int_unless_float(PyObject *v) -{ - if (PyFloat_Check(v)) { - PyErr_SetString(PyExc_TypeError, - "array item must be integer"); - return NULL; - } - return _PyLong_FromNbIndexOrNbInt(v); -} - static int II_setitem(arrayobject *ap, Py_ssize_t i, PyObject *v) { @@ -355,7 
+344,7 @@ II_setitem(arrayobject *ap, Py_ssize_t i, PyObject *v) int do_decref = 0; /* if nb_int was called */ if (!PyLong_Check(v)) { - v = get_int_unless_float(v); + v = PyNumber_Index(v); if (NULL == v) { return -1; } @@ -415,7 +404,7 @@ LL_setitem(arrayobject *ap, Py_ssize_t i, PyObject *v) int do_decref = 0; /* if nb_int was called */ if (!PyLong_Check(v)) { - v = get_int_unless_float(v); + v = PyNumber_Index(v); if (NULL == v) { return -1; } @@ -468,7 +457,7 @@ QQ_setitem(arrayobject *ap, Py_ssize_t i, PyObject *v) int do_decref = 0; /* if nb_int was called */ if (!PyLong_Check(v)) { - v = get_int_unless_float(v); + v = PyNumber_Index(v); if (NULL == v) { return -1; } diff --git a/Modules/cjkcodecs/clinic/multibytecodec.c.h b/Modules/cjkcodecs/clinic/multibytecodec.c.h index 5ddbbe221b98b..563888370d06c 100644 --- a/Modules/cjkcodecs/clinic/multibytecodec.c.h +++ b/Modules/cjkcodecs/clinic/multibytecodec.c.h @@ -171,11 +171,6 @@ _multibytecodec_MultibyteIncrementalEncoder_encode(MultibyteIncrementalEncoderOb if (!noptargs) { goto skip_optional_pos; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } final = _PyLong_AsInt(args[1]); if (final == -1 && PyErr_Occurred()) { goto exit; @@ -288,11 +283,6 @@ _multibytecodec_MultibyteIncrementalDecoder_decode(MultibyteIncrementalDecoderOb if (!noptargs) { goto skip_optional_pos; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } final = _PyLong_AsInt(args[1]); if (final == -1 && PyErr_Occurred()) { goto exit; @@ -525,4 +515,4 @@ PyDoc_STRVAR(_multibytecodec___create_codec__doc__, #define _MULTIBYTECODEC___CREATE_CODEC_METHODDEF \ {"__create_codec", (PyCFunction)_multibytecodec___create_codec, METH_O, _multibytecodec___create_codec__doc__}, -/*[clinic end generated code: output=5ce6fd4ca1f95620 input=a9049054013a1b77]*/ +/*[clinic end generated code: 
output=5c0f74129db07c87 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_bisectmodule.c.h b/Modules/clinic/_bisectmodule.c.h index 80ab7048f1428..8a0170a2c7f56 100644 --- a/Modules/clinic/_bisectmodule.c.h +++ b/Modules/clinic/_bisectmodule.c.h @@ -46,11 +46,6 @@ _bisect_bisect_right(PyObject *module, PyObject *const *args, Py_ssize_t nargs, goto skip_optional_pos; } if (args[2]) { - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[2]); @@ -122,11 +117,6 @@ _bisect_insort_right(PyObject *module, PyObject *const *args, Py_ssize_t nargs, goto skip_optional_pos; } if (args[2]) { - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[2]); @@ -197,11 +187,6 @@ _bisect_bisect_left(PyObject *module, PyObject *const *args, Py_ssize_t nargs, P goto skip_optional_pos; } if (args[2]) { - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[2]); @@ -273,11 +258,6 @@ _bisect_insort_left(PyObject *module, PyObject *const *args, Py_ssize_t nargs, P goto skip_optional_pos; } if (args[2]) { - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[2]); @@ -303,4 +283,4 @@ _bisect_insort_left(PyObject *module, PyObject *const *args, Py_ssize_t nargs, P exit: return return_value; } -/*[clinic end generated code: output=bcbd6c77331a08f0 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=e9097a9acd10b13f input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_bz2module.c.h b/Modules/clinic/_bz2module.c.h index 0eb6280d6e029..ab610a141e300 
100644 --- a/Modules/clinic/_bz2module.c.h +++ b/Modules/clinic/_bz2module.c.h @@ -95,11 +95,6 @@ _bz2_BZ2Compressor___init__(PyObject *self, PyObject *args, PyObject *kwargs) if (PyTuple_GET_SIZE(args) < 1) { goto skip_optional; } - if (PyFloat_Check(PyTuple_GET_ITEM(args, 0))) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } compresslevel = _PyLong_AsInt(PyTuple_GET_ITEM(args, 0)); if (compresslevel == -1 && PyErr_Occurred()) { goto exit; @@ -162,11 +157,6 @@ _bz2_BZ2Decompressor_decompress(BZ2Decompressor *self, PyObject *const *args, Py if (!noptargs) { goto skip_optional_pos; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[1]); @@ -220,4 +210,4 @@ _bz2_BZ2Decompressor___init__(PyObject *self, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=3f3f1e788fe28ee1 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=c69a7de8e26c2ad1 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_codecsmodule.c.h b/Modules/clinic/_codecsmodule.c.h index 772c8ca538da2..249065c9fd05a 100644 --- a/Modules/clinic/_codecsmodule.c.h +++ b/Modules/clinic/_codecsmodule.c.h @@ -424,11 +424,6 @@ _codecs_utf_7_decode(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 3) { goto skip_optional; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } final = _PyLong_AsInt(args[2]); if (final == -1 && PyErr_Occurred()) { goto exit; @@ -499,11 +494,6 @@ _codecs_utf_8_decode(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 3) { goto skip_optional; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } final = _PyLong_AsInt(args[2]); if (final == -1 && 
PyErr_Occurred()) { goto exit; @@ -574,11 +564,6 @@ _codecs_utf_16_decode(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 3) { goto skip_optional; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } final = _PyLong_AsInt(args[2]); if (final == -1 && PyErr_Occurred()) { goto exit; @@ -649,11 +634,6 @@ _codecs_utf_16_le_decode(PyObject *module, PyObject *const *args, Py_ssize_t nar if (nargs < 3) { goto skip_optional; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } final = _PyLong_AsInt(args[2]); if (final == -1 && PyErr_Occurred()) { goto exit; @@ -724,11 +704,6 @@ _codecs_utf_16_be_decode(PyObject *module, PyObject *const *args, Py_ssize_t nar if (nargs < 3) { goto skip_optional; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } final = _PyLong_AsInt(args[2]); if (final == -1 && PyErr_Occurred()) { goto exit; @@ -801,11 +776,6 @@ _codecs_utf_16_ex_decode(PyObject *module, PyObject *const *args, Py_ssize_t nar if (nargs < 3) { goto skip_optional; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } byteorder = _PyLong_AsInt(args[2]); if (byteorder == -1 && PyErr_Occurred()) { goto exit; @@ -813,11 +783,6 @@ _codecs_utf_16_ex_decode(PyObject *module, PyObject *const *args, Py_ssize_t nar if (nargs < 4) { goto skip_optional; } - if (PyFloat_Check(args[3])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } final = _PyLong_AsInt(args[3]); if (final == -1 && PyErr_Occurred()) { goto exit; @@ -888,11 +853,6 @@ _codecs_utf_32_decode(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 3) { goto skip_optional; } - if (PyFloat_Check(args[2])) { - 
PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } final = _PyLong_AsInt(args[2]); if (final == -1 && PyErr_Occurred()) { goto exit; @@ -963,11 +923,6 @@ _codecs_utf_32_le_decode(PyObject *module, PyObject *const *args, Py_ssize_t nar if (nargs < 3) { goto skip_optional; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } final = _PyLong_AsInt(args[2]); if (final == -1 && PyErr_Occurred()) { goto exit; @@ -1038,11 +993,6 @@ _codecs_utf_32_be_decode(PyObject *module, PyObject *const *args, Py_ssize_t nar if (nargs < 3) { goto skip_optional; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } final = _PyLong_AsInt(args[2]); if (final == -1 && PyErr_Occurred()) { goto exit; @@ -1115,11 +1065,6 @@ _codecs_utf_32_ex_decode(PyObject *module, PyObject *const *args, Py_ssize_t nar if (nargs < 3) { goto skip_optional; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } byteorder = _PyLong_AsInt(args[2]); if (byteorder == -1 && PyErr_Occurred()) { goto exit; @@ -1127,11 +1072,6 @@ _codecs_utf_32_ex_decode(PyObject *module, PyObject *const *args, Py_ssize_t nar if (nargs < 4) { goto skip_optional; } - if (PyFloat_Check(args[3])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } final = _PyLong_AsInt(args[3]); if (final == -1 && PyErr_Occurred()) { goto exit; @@ -1539,11 +1479,6 @@ _codecs_mbcs_decode(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 3) { goto skip_optional; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } final = _PyLong_AsInt(args[2]); if (final == -1 && PyErr_Occurred()) { goto exit; @@ -1618,11 +1553,6 @@ _codecs_oem_decode(PyObject 
*module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 3) { goto skip_optional; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } final = _PyLong_AsInt(args[2]); if (final == -1 && PyErr_Occurred()) { goto exit; @@ -1667,11 +1597,6 @@ _codecs_code_page_decode(PyObject *module, PyObject *const *args, Py_ssize_t nar if (!_PyArg_CheckPositional("code_page_decode", nargs, 2, 4)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } codepage = _PyLong_AsInt(args[0]); if (codepage == -1 && PyErr_Occurred()) { goto exit; @@ -1707,11 +1632,6 @@ _codecs_code_page_decode(PyObject *module, PyObject *const *args, Py_ssize_t nar if (nargs < 4) { goto skip_optional; } - if (PyFloat_Check(args[3])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } final = _PyLong_AsInt(args[3]); if (final == -1 && PyErr_Occurred()) { goto exit; @@ -1973,11 +1893,6 @@ _codecs_utf_16_encode(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 3) { goto skip_optional; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } byteorder = _PyLong_AsInt(args[2]); if (byteorder == -1 && PyErr_Occurred()) { goto exit; @@ -2160,11 +2075,6 @@ _codecs_utf_32_encode(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 3) { goto skip_optional; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } byteorder = _PyLong_AsInt(args[2]); if (byteorder == -1 && PyErr_Occurred()) { goto exit; @@ -2765,11 +2675,6 @@ _codecs_code_page_encode(PyObject *module, PyObject *const *args, Py_ssize_t nar if (!_PyArg_CheckPositional("code_page_encode", nargs, 2, 3)) { goto exit; } - if (PyFloat_Check(args[0])) { - 
PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } code_page = _PyLong_AsInt(args[0]); if (code_page == -1 && PyErr_Occurred()) { goto exit; @@ -2922,4 +2827,4 @@ _codecs_lookup_error(PyObject *module, PyObject *arg) #ifndef _CODECS_CODE_PAGE_ENCODE_METHODDEF #define _CODECS_CODE_PAGE_ENCODE_METHODDEF #endif /* !defined(_CODECS_CODE_PAGE_ENCODE_METHODDEF) */ -/*[clinic end generated code: output=51b42d170889524c input=a9049054013a1b77]*/ +/*[clinic end generated code: output=eeead01414be6e42 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_collectionsmodule.c.h b/Modules/clinic/_collectionsmodule.c.h index c3ba1a6698571..0cc1466549d72 100644 --- a/Modules/clinic/_collectionsmodule.c.h +++ b/Modules/clinic/_collectionsmodule.c.h @@ -50,11 +50,6 @@ tuplegetter_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) if (!_PyArg_CheckPositional("_tuplegetter", PyTuple_GET_SIZE(args), 2, 2)) { goto exit; } - if (PyFloat_Check(PyTuple_GET_ITEM(args, 0))) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(PyTuple_GET_ITEM(args, 0)); @@ -73,4 +68,4 @@ tuplegetter_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=9d2bfcc9df5faf35 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=439d77631a056b4d input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_curses_panel.c.h b/Modules/clinic/_curses_panel.c.h index 9840ed86e7912..cff274657658a 100644 --- a/Modules/clinic/_curses_panel.c.h +++ b/Modules/clinic/_curses_panel.c.h @@ -152,20 +152,10 @@ _curses_panel_panel_move(PyCursesPanelObject *self, PyObject *const *args, Py_ss if (!_PyArg_CheckPositional("move", nargs, 2, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } y = _PyLong_AsInt(args[0]); 
if (y == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } x = _PyLong_AsInt(args[1]); if (x == -1 && PyErr_Occurred()) { goto exit; @@ -335,4 +325,4 @@ _curses_panel_update_panels(PyObject *module, PyObject *Py_UNUSED(ignored)) { return _curses_panel_update_panels_impl(module); } -/*[clinic end generated code: output=d96dc1fd68e898d9 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=1226d5f94361ebfb input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_cursesmodule.c.h b/Modules/clinic/_cursesmodule.c.h index 50d7f213e04e6..f686ded51976c 100644 --- a/Modules/clinic/_cursesmodule.c.h +++ b/Modules/clinic/_cursesmodule.c.h @@ -252,11 +252,6 @@ _curses_window_bkgd(PyCursesWindowObject *self, PyObject *const *args, Py_ssize_ if (nargs < 2) { goto skip_optional; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } attr = PyLong_AsLong(args[1]); if (attr == -1 && PyErr_Occurred()) { goto exit; @@ -286,11 +281,6 @@ _curses_window_attroff(PyCursesWindowObject *self, PyObject *arg) PyObject *return_value = NULL; long attr; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } attr = PyLong_AsLong(arg); if (attr == -1 && PyErr_Occurred()) { goto exit; @@ -319,11 +309,6 @@ _curses_window_attron(PyCursesWindowObject *self, PyObject *arg) PyObject *return_value = NULL; long attr; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } attr = PyLong_AsLong(arg); if (attr == -1 && PyErr_Occurred()) { goto exit; @@ -352,11 +337,6 @@ _curses_window_attrset(PyCursesWindowObject *self, PyObject *arg) PyObject *return_value = NULL; long attr; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got 
float" ); - goto exit; - } attr = PyLong_AsLong(arg); if (attr == -1 && PyErr_Occurred()) { goto exit; @@ -399,11 +379,6 @@ _curses_window_bkgdset(PyCursesWindowObject *self, PyObject *const *args, Py_ssi if (nargs < 2) { goto skip_optional; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } attr = PyLong_AsLong(args[1]); if (attr == -1 && PyErr_Occurred()) { goto exit; @@ -687,11 +662,6 @@ _curses_window_echochar(PyCursesWindowObject *self, PyObject *const *args, Py_ss if (nargs < 2) { goto skip_optional; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } attr = PyLong_AsLong(args[1]); if (attr == -1 && PyErr_Occurred()) { goto exit; @@ -733,20 +703,10 @@ _curses_window_enclose(PyCursesWindowObject *self, PyObject *const *args, Py_ssi if (!_PyArg_CheckPositional("enclose", nargs, 2, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } y = _PyLong_AsInt(args[0]); if (y == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } x = _PyLong_AsInt(args[1]); if (x == -1 && PyErr_Occurred()) { goto exit; @@ -1303,11 +1263,6 @@ _curses_window_is_linetouched(PyCursesWindowObject *self, PyObject *arg) PyObject *return_value = NULL; int line; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } line = _PyLong_AsInt(arg); if (line == -1 && PyErr_Occurred()) { goto exit; @@ -1552,20 +1507,10 @@ _curses_window_redrawln(PyCursesWindowObject *self, PyObject *const *args, Py_ss if (!_PyArg_CheckPositional("redrawln", nargs, 2, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); 
- goto exit; - } beg = _PyLong_AsInt(args[0]); if (beg == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } num = _PyLong_AsInt(args[1]); if (num == -1 && PyErr_Occurred()) { goto exit; @@ -1661,20 +1606,10 @@ _curses_window_setscrreg(PyCursesWindowObject *self, PyObject *const *args, Py_s if (!_PyArg_CheckPositional("setscrreg", nargs, 2, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } top = _PyLong_AsInt(args[0]); if (top == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } bottom = _PyLong_AsInt(args[1]); if (bottom == -1 && PyErr_Occurred()) { goto exit; @@ -2005,11 +1940,6 @@ _curses_cbreak(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 1) { goto skip_optional; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } flag = _PyLong_AsInt(args[0]); if (flag == -1 && PyErr_Occurred()) { goto exit; @@ -2045,11 +1975,6 @@ _curses_color_content(PyObject *module, PyObject *arg) PyObject *return_value = NULL; short color_number; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { long ival = PyLong_AsLong(arg); if (ival == -1 && PyErr_Occurred()) { @@ -2099,11 +2024,6 @@ _curses_color_pair(PyObject *module, PyObject *arg) PyObject *return_value = NULL; short color_number; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { long ival = PyLong_AsLong(arg); if (ival == -1 && PyErr_Occurred()) { @@ -2155,11 +2075,6 @@ _curses_curs_set(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int 
visibility; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } visibility = _PyLong_AsInt(arg); if (visibility == -1 && PyErr_Occurred()) { goto exit; @@ -2235,11 +2150,6 @@ _curses_delay_output(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int ms; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } ms = _PyLong_AsInt(arg); if (ms == -1 && PyErr_Occurred()) { goto exit; @@ -2297,11 +2207,6 @@ _curses_echo(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 1) { goto skip_optional; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } flag = _PyLong_AsInt(args[0]); if (flag == -1 && PyErr_Occurred()) { goto exit; @@ -2469,11 +2374,6 @@ _curses_ungetmouse(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("ungetmouse", nargs, 5, 5)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { long ival = PyLong_AsLong(args[0]); if (ival == -1 && PyErr_Occurred()) { @@ -2493,29 +2393,14 @@ _curses_ungetmouse(PyObject *module, PyObject *const *args, Py_ssize_t nargs) id = (short) ival; } } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } x = _PyLong_AsInt(args[1]); if (x == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } y = _PyLong_AsInt(args[2]); if (y == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[3])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } z = _PyLong_AsInt(args[3]); if (z == -1 && PyErr_Occurred()) { goto exit; @@ 
-2568,11 +2453,6 @@ _curses_halfdelay(PyObject *module, PyObject *arg) PyObject *return_value = NULL; unsigned char tenths; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { long ival = PyLong_AsLong(arg); if (ival == -1 && PyErr_Occurred()) { @@ -2675,11 +2555,6 @@ _curses_has_key(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int key; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } key = _PyLong_AsInt(arg); if (key == -1 && PyErr_Occurred()) { goto exit; @@ -2730,11 +2605,6 @@ _curses_init_color(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("init_color", nargs, 4, 4)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { long ival = PyLong_AsLong(args[0]); if (ival == -1 && PyErr_Occurred()) { @@ -2754,11 +2624,6 @@ _curses_init_color(PyObject *module, PyObject *const *args, Py_ssize_t nargs) color_number = (short) ival; } } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { long ival = PyLong_AsLong(args[1]); if (ival == -1 && PyErr_Occurred()) { @@ -2778,11 +2643,6 @@ _curses_init_color(PyObject *module, PyObject *const *args, Py_ssize_t nargs) r = (short) ival; } } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { long ival = PyLong_AsLong(args[2]); if (ival == -1 && PyErr_Occurred()) { @@ -2802,11 +2662,6 @@ _curses_init_color(PyObject *module, PyObject *const *args, Py_ssize_t nargs) g = (short) ival; } } - if (PyFloat_Check(args[3])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { long ival = PyLong_AsLong(args[3]); if (ival == -1 && PyErr_Occurred()) { 
@@ -2866,11 +2721,6 @@ _curses_init_pair(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("init_pair", nargs, 3, 3)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { long ival = PyLong_AsLong(args[0]); if (ival == -1 && PyErr_Occurred()) { @@ -2890,11 +2740,6 @@ _curses_init_pair(PyObject *module, PyObject *const *args, Py_ssize_t nargs) pair_number = (short) ival; } } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { long ival = PyLong_AsLong(args[1]); if (ival == -1 && PyErr_Occurred()) { @@ -2914,11 +2759,6 @@ _curses_init_pair(PyObject *module, PyObject *const *args, Py_ssize_t nargs) fg = (short) ival; } } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { long ival = PyLong_AsLong(args[2]); if (ival == -1 && PyErr_Occurred()) { @@ -3024,11 +2864,6 @@ _curses_setupterm(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyO goto skip_optional_pos; } } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(args[1]); if (fd == -1 && PyErr_Occurred()) { goto exit; @@ -3093,11 +2928,6 @@ _curses_set_escdelay(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int ms; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } ms = _PyLong_AsInt(arg); if (ms == -1 && PyErr_Occurred()) { goto exit; @@ -3161,11 +2991,6 @@ _curses_set_tabsize(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int size; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } size = _PyLong_AsInt(arg); if (size == -1 && PyErr_Occurred()) { goto 
exit; @@ -3195,11 +3020,6 @@ _curses_intrflush(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int flag; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } flag = _PyLong_AsInt(arg); if (flag == -1 && PyErr_Occurred()) { goto exit; @@ -3257,20 +3077,10 @@ _curses_is_term_resized(PyObject *module, PyObject *const *args, Py_ssize_t narg if (!_PyArg_CheckPositional("is_term_resized", nargs, 2, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } nlines = _PyLong_AsInt(args[0]); if (nlines == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } ncols = _PyLong_AsInt(args[1]); if (ncols == -1 && PyErr_Occurred()) { goto exit; @@ -3304,11 +3114,6 @@ _curses_keyname(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int key; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } key = _PyLong_AsInt(arg); if (key == -1 && PyErr_Occurred()) { goto exit; @@ -3379,11 +3184,6 @@ _curses_meta(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int yes; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } yes = _PyLong_AsInt(arg); if (yes == -1 && PyErr_Occurred()) { goto exit; @@ -3421,11 +3221,6 @@ _curses_mouseinterval(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int interval; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } interval = _PyLong_AsInt(arg); if (interval == -1 && PyErr_Occurred()) { goto exit; @@ -3497,11 +3292,6 @@ _curses_napms(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int ms; - if 
(PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } ms = _PyLong_AsInt(arg); if (ms == -1 && PyErr_Occurred()) { goto exit; @@ -3539,20 +3329,10 @@ _curses_newpad(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("newpad", nargs, 2, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } nlines = _PyLong_AsInt(args[0]); if (nlines == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } ncols = _PyLong_AsInt(args[1]); if (ncols == -1 && PyErr_Occurred()) { goto exit; @@ -3648,11 +3428,6 @@ _curses_nl(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 1) { goto skip_optional; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } flag = _PyLong_AsInt(args[0]); if (flag == -1 && PyErr_Occurred()) { goto exit; @@ -3787,11 +3562,6 @@ _curses_pair_content(PyObject *module, PyObject *arg) PyObject *return_value = NULL; short pair_number; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { long ival = PyLong_AsLong(arg); if (ival == -1 && PyErr_Occurred()) { @@ -3837,11 +3607,6 @@ _curses_pair_number(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int attr; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } attr = _PyLong_AsInt(arg); if (attr == -1 && PyErr_Occurred()) { goto exit; @@ -3911,11 +3676,6 @@ _curses_qiflush(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 1) { goto skip_optional; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got 
float" ); - goto exit; - } flag = _PyLong_AsInt(args[0]); if (flag == -1 && PyErr_Occurred()) { goto exit; @@ -3979,11 +3739,6 @@ _curses_raw(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 1) { goto skip_optional; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } flag = _PyLong_AsInt(args[0]); if (flag == -1 && PyErr_Occurred()) { goto exit; @@ -4081,20 +3836,10 @@ _curses_resizeterm(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("resizeterm", nargs, 2, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } nlines = _PyLong_AsInt(args[0]); if (nlines == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } ncols = _PyLong_AsInt(args[1]); if (ncols == -1 && PyErr_Occurred()) { goto exit; @@ -4142,20 +3887,10 @@ _curses_resize_term(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("resize_term", nargs, 2, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } nlines = _PyLong_AsInt(args[0]); if (nlines == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } ncols = _PyLong_AsInt(args[1]); if (ncols == -1 && PyErr_Occurred()) { goto exit; @@ -4217,20 +3952,10 @@ _curses_setsyx(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("setsyx", nargs, 2, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } y = _PyLong_AsInt(args[0]); if (y == -1 && PyErr_Occurred()) { 
goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } x = _PyLong_AsInt(args[1]); if (x == -1 && PyErr_Occurred()) { goto exit; @@ -4500,11 +4225,6 @@ _curses_typeahead(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int fd; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(arg); if (fd == -1 && PyErr_Occurred()) { goto exit; @@ -4580,11 +4300,6 @@ _curses_use_env(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int flag; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } flag = _PyLong_AsInt(arg); if (flag == -1 && PyErr_Occurred()) { goto exit; @@ -4713,4 +4428,4 @@ _curses_use_default_colors(PyObject *module, PyObject *Py_UNUSED(ignored)) #ifndef _CURSES_USE_DEFAULT_COLORS_METHODDEF #define _CURSES_USE_DEFAULT_COLORS_METHODDEF #endif /* !defined(_CURSES_USE_DEFAULT_COLORS_METHODDEF) */ -/*[clinic end generated code: output=b53652f8acafd817 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=478d93f7692385eb input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_datetimemodule.c.h b/Modules/clinic/_datetimemodule.c.h index 973a4ea025347..7bd7c1986dc7b 100644 --- a/Modules/clinic/_datetimemodule.c.h +++ b/Modules/clinic/_datetimemodule.c.h @@ -35,29 +35,14 @@ iso_calendar_date_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) if (!fastargs) { goto exit; } - if (PyFloat_Check(fastargs[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } year = _PyLong_AsInt(fastargs[0]); if (year == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(fastargs[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } week = _PyLong_AsInt(fastargs[1]); if (week == -1 && 
PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(fastargs[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } weekday = _PyLong_AsInt(fastargs[2]); if (weekday == -1 && PyErr_Occurred()) { goto exit; @@ -109,4 +94,4 @@ datetime_datetime_now(PyTypeObject *type, PyObject *const *args, Py_ssize_t narg exit: return return_value; } -/*[clinic end generated code: output=5e17549f29a439a5 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=f61310936e3d8091 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_dbmmodule.c.h b/Modules/clinic/_dbmmodule.c.h index a7d735085068d..edf29be92af9b 100644 --- a/Modules/clinic/_dbmmodule.c.h +++ b/Modules/clinic/_dbmmodule.c.h @@ -162,11 +162,6 @@ dbmopen(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 3) { goto skip_optional; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } mode = _PyLong_AsInt(args[2]); if (mode == -1 && PyErr_Occurred()) { goto exit; @@ -177,4 +172,4 @@ dbmopen(PyObject *module, PyObject *const *args, Py_ssize_t nargs) exit: return return_value; } -/*[clinic end generated code: output=7ced103488cbca7a input=a9049054013a1b77]*/ +/*[clinic end generated code: output=ba4ff07b8c8bbfe4 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_elementtree.c.h b/Modules/clinic/_elementtree.c.h index 825416f4a3982..5934218a52b95 100644 --- a/Modules/clinic/_elementtree.c.h +++ b/Modules/clinic/_elementtree.c.h @@ -430,11 +430,6 @@ _elementtree_Element_insert(ElementObject *self, PyObject *const *args, Py_ssize if (!_PyArg_CheckPositional("insert", nargs, 2, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[0]); @@ -920,4 +915,4 @@ _elementtree_XMLParser__setevents(XMLParserObject *self, PyObject 
*const *args, exit: return return_value; } -/*[clinic end generated code: output=b7f6a32462fc42a9 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=c98b210c525a9338 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_gdbmmodule.c.h b/Modules/clinic/_gdbmmodule.c.h index aa37a24d3b211..195159104d995 100644 --- a/Modules/clinic/_gdbmmodule.c.h +++ b/Modules/clinic/_gdbmmodule.c.h @@ -283,11 +283,6 @@ dbmopen(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 3) { goto skip_optional; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } mode = _PyLong_AsInt(args[2]); if (mode == -1 && PyErr_Occurred()) { goto exit; @@ -298,4 +293,4 @@ dbmopen(PyObject *module, PyObject *const *args, Py_ssize_t nargs) exit: return return_value; } -/*[clinic end generated code: output=2766471b2fa1a816 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=c9d43f42677f4efb input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_hashopenssl.c.h b/Modules/clinic/_hashopenssl.c.h index 5ab4e996bf96b..619cb1c8516b8 100644 --- a/Modules/clinic/_hashopenssl.c.h +++ b/Modules/clinic/_hashopenssl.c.h @@ -92,11 +92,6 @@ EVPXOF_digest(EVPobject *self, PyObject *const *args, Py_ssize_t nargs, PyObject if (!args) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[0]); @@ -144,11 +139,6 @@ EVPXOF_hexdigest(EVPobject *self, PyObject *const *args, Py_ssize_t nargs, PyObj if (!args) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[0]); @@ -936,11 +926,6 @@ pbkdf2_hmac(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject _PyArg_BadArgument("pbkdf2_hmac", "argument 
'salt'", "contiguous buffer", args[2]); goto exit; } - if (PyFloat_Check(args[3])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } iterations = PyLong_AsLong(args[3]); if (iterations == -1 && PyErr_Occurred()) { goto exit; @@ -1055,11 +1040,6 @@ _hashlib_scrypt(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj } } if (args[5]) { - if (PyFloat_Check(args[5])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } maxmem = PyLong_AsLong(args[5]); if (maxmem == -1 && PyErr_Occurred()) { goto exit; @@ -1068,11 +1048,6 @@ _hashlib_scrypt(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj goto skip_optional_kwonly; } } - if (PyFloat_Check(args[6])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } dklen = PyLong_AsLong(args[6]); if (dklen == -1 && PyErr_Occurred()) { goto exit; @@ -1402,4 +1377,4 @@ _hashlib_get_fips_mode(PyObject *module, PyObject *Py_UNUSED(ignored)) #ifndef _HASHLIB_GET_FIPS_MODE_METHODDEF #define _HASHLIB_GET_FIPS_MODE_METHODDEF #endif /* !defined(_HASHLIB_GET_FIPS_MODE_METHODDEF) */ -/*[clinic end generated code: output=a0bff5dcef88de6a input=a9049054013a1b77]*/ +/*[clinic end generated code: output=d8dddcd85fb11dde input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_lzmamodule.c.h b/Modules/clinic/_lzmamodule.c.h index 82ef4d517d83c..40913ef295489 100644 --- a/Modules/clinic/_lzmamodule.c.h +++ b/Modules/clinic/_lzmamodule.c.h @@ -116,11 +116,6 @@ _lzma_LZMADecompressor_decompress(Decompressor *self, PyObject *const *args, Py_ if (!noptargs) { goto skip_optional_pos; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[1]); @@ -194,11 +189,6 @@ _lzma_LZMADecompressor___init__(PyObject *self, PyObject *args, PyObject *kwargs goto 
skip_optional_pos; } if (fastargs[0]) { - if (PyFloat_Check(fastargs[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } format = _PyLong_AsInt(fastargs[0]); if (format == -1 && PyErr_Occurred()) { goto exit; @@ -241,11 +231,6 @@ _lzma_is_check_supported(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int check_id; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } check_id = _PyLong_AsInt(arg); if (check_id == -1 && PyErr_Occurred()) { goto exit; @@ -334,4 +319,4 @@ _lzma__decode_filter_properties(PyObject *module, PyObject *const *args, Py_ssiz return return_value; } -/*[clinic end generated code: output=f7477a10e86a717d input=a9049054013a1b77]*/ +/*[clinic end generated code: output=a87074ca902bd432 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_opcode.c.h b/Modules/clinic/_opcode.c.h index 777701ff14095..6915f21d64444 100644 --- a/Modules/clinic/_opcode.c.h +++ b/Modules/clinic/_opcode.c.h @@ -32,11 +32,6 @@ _opcode_stack_effect(PyObject *module, PyObject *const *args, Py_ssize_t nargs, if (!args) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } opcode = _PyLong_AsInt(args[0]); if (opcode == -1 && PyErr_Occurred()) { goto exit; @@ -61,4 +56,4 @@ _opcode_stack_effect(PyObject *module, PyObject *const *args, Py_ssize_t nargs, exit: return return_value; } -/*[clinic end generated code: output=7bc08f2835b2cf89 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=bcf66d25c2624197 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_operator.c.h b/Modules/clinic/_operator.c.h index f9e353d86b496..2a66f8ff1a542 100644 --- a/Modules/clinic/_operator.c.h +++ b/Modules/clinic/_operator.c.h @@ -1424,11 +1424,6 @@ _operator_length_hint(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 2) { goto 
skip_optional; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[1]); @@ -1491,4 +1486,4 @@ _operator__compare_digest(PyObject *module, PyObject *const *args, Py_ssize_t na exit: return return_value; } -/*[clinic end generated code: output=e7ed71a8c475a901 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=1fe4adf4f5761420 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_randommodule.c.h b/Modules/clinic/_randommodule.c.h index a467811d93b27..b3cd435b6f204 100644 --- a/Modules/clinic/_randommodule.c.h +++ b/Modules/clinic/_randommodule.c.h @@ -100,11 +100,6 @@ _random_Random_getrandbits(RandomObject *self, PyObject *arg) PyObject *return_value = NULL; int k; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } k = _PyLong_AsInt(arg); if (k == -1 && PyErr_Occurred()) { goto exit; @@ -114,4 +109,4 @@ _random_Random_getrandbits(RandomObject *self, PyObject *arg) exit: return return_value; } -/*[clinic end generated code: output=a7feb0c9c8d1b627 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=cc8a23b2757dc6ba input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_sre.c.h b/Modules/clinic/_sre.c.h index d398a8504ba41..fc3ae04792f88 100644 --- a/Modules/clinic/_sre.c.h +++ b/Modules/clinic/_sre.c.h @@ -47,11 +47,6 @@ _sre_ascii_iscased(PyObject *module, PyObject *arg) int character; int _return_value; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } character = _PyLong_AsInt(arg); if (character == -1 && PyErr_Occurred()) { goto exit; @@ -84,11 +79,6 @@ _sre_unicode_iscased(PyObject *module, PyObject *arg) int character; int _return_value; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto 
exit; - } character = _PyLong_AsInt(arg); if (character == -1 && PyErr_Occurred()) { goto exit; @@ -121,11 +111,6 @@ _sre_ascii_tolower(PyObject *module, PyObject *arg) int character; int _return_value; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } character = _PyLong_AsInt(arg); if (character == -1 && PyErr_Occurred()) { goto exit; @@ -158,11 +143,6 @@ _sre_unicode_tolower(PyObject *module, PyObject *arg) int character; int _return_value; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } character = _PyLong_AsInt(arg); if (character == -1 && PyErr_Occurred()) { goto exit; @@ -211,11 +191,6 @@ _sre_SRE_Pattern_match(PatternObject *self, PyObject *const *args, Py_ssize_t na goto skip_optional_pos; } if (args[1]) { - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[1]); @@ -232,11 +207,6 @@ _sre_SRE_Pattern_match(PatternObject *self, PyObject *const *args, Py_ssize_t na goto skip_optional_pos; } } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[2]); @@ -290,11 +260,6 @@ _sre_SRE_Pattern_fullmatch(PatternObject *self, PyObject *const *args, Py_ssize_ goto skip_optional_pos; } if (args[1]) { - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[1]); @@ -311,11 +276,6 @@ _sre_SRE_Pattern_fullmatch(PatternObject *self, PyObject *const *args, Py_ssize_ goto skip_optional_pos; } } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { 
Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[2]); @@ -371,11 +331,6 @@ _sre_SRE_Pattern_search(PatternObject *self, PyObject *const *args, Py_ssize_t n goto skip_optional_pos; } if (args[1]) { - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[1]); @@ -392,11 +347,6 @@ _sre_SRE_Pattern_search(PatternObject *self, PyObject *const *args, Py_ssize_t n goto skip_optional_pos; } } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[2]); @@ -450,11 +400,6 @@ _sre_SRE_Pattern_findall(PatternObject *self, PyObject *const *args, Py_ssize_t goto skip_optional_pos; } if (args[1]) { - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[1]); @@ -471,11 +416,6 @@ _sre_SRE_Pattern_findall(PatternObject *self, PyObject *const *args, Py_ssize_t goto skip_optional_pos; } } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[2]); @@ -531,11 +471,6 @@ _sre_SRE_Pattern_finditer(PatternObject *self, PyObject *const *args, Py_ssize_t goto skip_optional_pos; } if (args[1]) { - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[1]); @@ -552,11 +487,6 @@ _sre_SRE_Pattern_finditer(PatternObject *self, PyObject *const *args, Py_ssize_t goto skip_optional_pos; } } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = 
-1; PyObject *iobj = PyNumber_Index(args[2]); @@ -609,11 +539,6 @@ _sre_SRE_Pattern_scanner(PatternObject *self, PyObject *const *args, Py_ssize_t goto skip_optional_pos; } if (args[1]) { - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[1]); @@ -630,11 +555,6 @@ _sre_SRE_Pattern_scanner(PatternObject *self, PyObject *const *args, Py_ssize_t goto skip_optional_pos; } } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[2]); @@ -686,11 +606,6 @@ _sre_SRE_Pattern_split(PatternObject *self, PyObject *const *args, Py_ssize_t na if (!noptargs) { goto skip_optional_pos; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[1]); @@ -744,11 +659,6 @@ _sre_SRE_Pattern_sub(PatternObject *self, PyObject *const *args, Py_ssize_t narg if (!noptargs) { goto skip_optional_pos; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[2]); @@ -802,11 +712,6 @@ _sre_SRE_Pattern_subn(PatternObject *self, PyObject *const *args, Py_ssize_t nar if (!noptargs) { goto skip_optional_pos; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[2]); @@ -884,11 +789,6 @@ _sre_compile(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject goto exit; } pattern = args[0]; - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } flags = 
_PyLong_AsInt(args[1]); if (flags == -1 && PyErr_Occurred()) { goto exit; @@ -898,11 +798,6 @@ _sre_compile(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject goto exit; } code = args[2]; - if (PyFloat_Check(args[3])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[3]); @@ -1207,4 +1102,4 @@ _sre_SRE_Scanner_search(ScannerObject *self, PyObject *Py_UNUSED(ignored)) { return _sre_SRE_Scanner_search_impl(self); } -/*[clinic end generated code: output=1adeddce58ae284c input=a9049054013a1b77]*/ +/*[clinic end generated code: output=7a3360917b40a808 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_ssl.c.h b/Modules/clinic/_ssl.c.h index ce8669ae212ed..2375f83b325c7 100644 --- a/Modules/clinic/_ssl.c.h +++ b/Modules/clinic/_ssl.c.h @@ -406,11 +406,6 @@ _ssl__SSLContext(PyTypeObject *type, PyObject *args, PyObject *kwargs) if (!_PyArg_CheckPositional("_SSLContext", PyTuple_GET_SIZE(args), 1, 1)) { goto exit; } - if (PyFloat_Check(PyTuple_GET_ITEM(args, 0))) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } proto_version = _PyLong_AsInt(PyTuple_GET_ITEM(args, 0)); if (proto_version == -1 && PyErr_Occurred()) { goto exit; @@ -694,11 +689,6 @@ _ssl__SSLContext__wrap_socket(PySSLContext *self, PyObject *const *args, Py_ssiz goto exit; } sock = args[0]; - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } server_side = _PyLong_AsInt(args[1]); if (server_side == -1 && PyErr_Occurred()) { goto exit; @@ -774,11 +764,6 @@ _ssl__SSLContext__wrap_bio(PySSLContext *self, PyObject *const *args, Py_ssize_t goto exit; } outgoing = (PySSLMemoryBIO *)args[1]; - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } server_side = _PyLong_AsInt(args[2]); if 
(server_side == -1 && PyErr_Occurred()) { goto exit; @@ -977,11 +962,6 @@ _ssl_MemoryBIO_read(PySSLMemoryBIO *self, PyObject *const *args, Py_ssize_t narg if (nargs < 1) { goto skip_optional; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } len = _PyLong_AsInt(args[0]); if (len == -1 && PyErr_Occurred()) { goto exit; @@ -1132,11 +1112,6 @@ _ssl_RAND_bytes(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int n; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } n = _PyLong_AsInt(arg); if (n == -1 && PyErr_Occurred()) { goto exit; @@ -1168,11 +1143,6 @@ _ssl_RAND_pseudo_bytes(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int n; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } n = _PyLong_AsInt(arg); if (n == -1 && PyErr_Occurred()) { goto exit; @@ -1333,11 +1303,6 @@ _ssl_nid2obj(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int nid; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } nid = _PyLong_AsInt(arg); if (nid == -1 && PyErr_Occurred()) { goto exit; @@ -1482,4 +1447,4 @@ _ssl_enum_crls(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje #ifndef _SSL_ENUM_CRLS_METHODDEF #define _SSL_ENUM_CRLS_METHODDEF #endif /* !defined(_SSL_ENUM_CRLS_METHODDEF) */ -/*[clinic end generated code: output=a4aeb3f92a091c64 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=d4e4f9cdd08819f4 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_struct.c.h b/Modules/clinic/_struct.c.h index 36c4b4046cd73..874f30a445008 100644 --- a/Modules/clinic/_struct.c.h +++ b/Modules/clinic/_struct.c.h @@ -124,11 +124,6 @@ Struct_unpack_from(PyStructObject *self, PyObject *const *args, Py_ssize_t nargs if (!noptargs) { 
goto skip_optional_pos; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[1]); @@ -315,11 +310,6 @@ unpack_from(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject if (!noptargs) { goto skip_optional_pos; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[2]); @@ -386,4 +376,4 @@ iter_unpack(PyObject *module, PyObject *const *args, Py_ssize_t nargs) return return_value; } -/*[clinic end generated code: output=6a6228cfc4b7099c input=a9049054013a1b77]*/ +/*[clinic end generated code: output=1205daf7f616f0cf input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_tkinter.c.h b/Modules/clinic/_tkinter.c.h index 73c3faeaf962e..9718986838fbb 100644 --- a/Modules/clinic/_tkinter.c.h +++ b/Modules/clinic/_tkinter.c.h @@ -434,11 +434,6 @@ _tkinter_tkapp_createfilehandler(TkappObject *self, PyObject *const *args, Py_ss goto exit; } file = args[0]; - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } mask = _PyLong_AsInt(args[1]); if (mask == -1 && PyErr_Occurred()) { goto exit; @@ -503,11 +498,6 @@ _tkinter_tkapp_createtimerhandler(TkappObject *self, PyObject *const *args, Py_s if (!_PyArg_CheckPositional("createtimerhandler", nargs, 2, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } milliseconds = _PyLong_AsInt(args[0]); if (milliseconds == -1 && PyErr_Occurred()) { goto exit; @@ -542,11 +532,6 @@ _tkinter_tkapp_mainloop(TkappObject *self, PyObject *const *args, Py_ssize_t nar if (nargs < 1) { goto skip_optional; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, 
got float" ); - goto exit; - } threshold = _PyLong_AsInt(args[0]); if (threshold == -1 && PyErr_Occurred()) { goto exit; @@ -581,11 +566,6 @@ _tkinter_tkapp_dooneevent(TkappObject *self, PyObject *const *args, Py_ssize_t n if (nargs < 1) { goto skip_optional; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } flags = _PyLong_AsInt(args[0]); if (flags == -1 && PyErr_Occurred()) { goto exit; @@ -769,11 +749,6 @@ _tkinter_create(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 4) { goto skip_optional; } - if (PyFloat_Check(args[3])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } interactive = _PyLong_AsInt(args[3]); if (interactive == -1 && PyErr_Occurred()) { goto exit; @@ -781,11 +756,6 @@ _tkinter_create(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 5) { goto skip_optional; } - if (PyFloat_Check(args[4])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } wantobjects = _PyLong_AsInt(args[4]); if (wantobjects == -1 && PyErr_Occurred()) { goto exit; @@ -793,11 +763,6 @@ _tkinter_create(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 6) { goto skip_optional; } - if (PyFloat_Check(args[5])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } wantTk = _PyLong_AsInt(args[5]); if (wantTk == -1 && PyErr_Occurred()) { goto exit; @@ -805,11 +770,6 @@ _tkinter_create(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 7) { goto skip_optional; } - if (PyFloat_Check(args[6])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } sync = _PyLong_AsInt(args[6]); if (sync == -1 && PyErr_Occurred()) { goto exit; @@ -862,11 +822,6 @@ _tkinter_setbusywaitinterval(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int 
new_val; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } new_val = _PyLong_AsInt(arg); if (new_val == -1 && PyErr_Occurred()) { goto exit; @@ -912,4 +867,4 @@ _tkinter_getbusywaitinterval(PyObject *module, PyObject *Py_UNUSED(ignored)) #ifndef _TKINTER_TKAPP_DELETEFILEHANDLER_METHODDEF #define _TKINTER_TKAPP_DELETEFILEHANDLER_METHODDEF #endif /* !defined(_TKINTER_TKAPP_DELETEFILEHANDLER_METHODDEF) */ -/*[clinic end generated code: output=492b8b833fe54bc9 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=ab311480dd044fe4 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_tracemalloc.c.h b/Modules/clinic/_tracemalloc.c.h index 049cacd832663..20afd76f06f11 100644 --- a/Modules/clinic/_tracemalloc.c.h +++ b/Modules/clinic/_tracemalloc.c.h @@ -101,11 +101,6 @@ _tracemalloc_start(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 1) { goto skip_optional; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } nframe = _PyLong_AsInt(args[0]); if (nframe == -1 && PyErr_Occurred()) { goto exit; @@ -217,4 +212,4 @@ _tracemalloc_reset_peak(PyObject *module, PyObject *Py_UNUSED(ignored)) { return _tracemalloc_reset_peak_impl(module); } -/*[clinic end generated code: output=a130117b1af821da input=a9049054013a1b77]*/ +/*[clinic end generated code: output=bafca0a19b0b0823 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/arraymodule.c.h b/Modules/clinic/arraymodule.c.h index b9245ca91d5fa..334db39db16ea 100644 --- a/Modules/clinic/arraymodule.c.h +++ b/Modules/clinic/arraymodule.c.h @@ -82,11 +82,6 @@ array_array_pop(arrayobject *self, PyObject *const *args, Py_ssize_t nargs) if (nargs < 1) { goto skip_optional; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = 
PyNumber_Index(args[0]); @@ -137,11 +132,6 @@ array_array_insert(arrayobject *self, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("insert", nargs, 2, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[0]); @@ -253,11 +243,6 @@ array_array_fromfile(arrayobject *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } f = args[0]; - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[1]); @@ -483,11 +468,6 @@ array__array_reconstructor(PyObject *module, PyObject *const *args, Py_ssize_t n goto exit; } typecode = PyUnicode_READ_CHAR(args[1], 0); - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } mformat_code = _PyLong_AsInt(args[2]); if (mformat_code == -1 && PyErr_Occurred()) { goto exit; @@ -534,4 +514,4 @@ PyDoc_STRVAR(array_arrayiterator___setstate____doc__, #define ARRAY_ARRAYITERATOR___SETSTATE___METHODDEF \ {"__setstate__", (PyCFunction)array_arrayiterator___setstate__, METH_O, array_arrayiterator___setstate____doc__}, -/*[clinic end generated code: output=9f70748dd3bc532f input=a9049054013a1b77]*/ +/*[clinic end generated code: output=c953eb8486c7c8da input=a9049054013a1b77]*/ diff --git a/Modules/clinic/audioop.c.h b/Modules/clinic/audioop.c.h index 8745533eeb629..56d31d3d721eb 100644 --- a/Modules/clinic/audioop.c.h +++ b/Modules/clinic/audioop.c.h @@ -33,20 +33,10 @@ audioop_getsample(PyObject *module, PyObject *const *args, Py_ssize_t nargs) _PyArg_BadArgument("getsample", "argument 1", "contiguous buffer", args[0]); goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } width = 
_PyLong_AsInt(args[1]); if (width == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[2]); @@ -99,11 +89,6 @@ audioop_max(PyObject *module, PyObject *const *args, Py_ssize_t nargs) _PyArg_BadArgument("max", "argument 1", "contiguous buffer", args[0]); goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } width = _PyLong_AsInt(args[1]); if (width == -1 && PyErr_Occurred()) { goto exit; @@ -148,11 +133,6 @@ audioop_minmax(PyObject *module, PyObject *const *args, Py_ssize_t nargs) _PyArg_BadArgument("minmax", "argument 1", "contiguous buffer", args[0]); goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } width = _PyLong_AsInt(args[1]); if (width == -1 && PyErr_Occurred()) { goto exit; @@ -197,11 +177,6 @@ audioop_avg(PyObject *module, PyObject *const *args, Py_ssize_t nargs) _PyArg_BadArgument("avg", "argument 1", "contiguous buffer", args[0]); goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } width = _PyLong_AsInt(args[1]); if (width == -1 && PyErr_Occurred()) { goto exit; @@ -246,11 +221,6 @@ audioop_rms(PyObject *module, PyObject *const *args, Py_ssize_t nargs) _PyArg_BadArgument("rms", "argument 1", "contiguous buffer", args[0]); goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } width = _PyLong_AsInt(args[1]); if (width == -1 && PyErr_Occurred()) { goto exit; @@ -400,11 +370,6 @@ audioop_findmax(PyObject *module, PyObject *const *args, Py_ssize_t nargs) _PyArg_BadArgument("findmax", "argument 1", "contiguous buffer", args[0]); goto exit; 
} - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[1]); @@ -457,11 +422,6 @@ audioop_avgpp(PyObject *module, PyObject *const *args, Py_ssize_t nargs) _PyArg_BadArgument("avgpp", "argument 1", "contiguous buffer", args[0]); goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } width = _PyLong_AsInt(args[1]); if (width == -1 && PyErr_Occurred()) { goto exit; @@ -506,11 +466,6 @@ audioop_maxpp(PyObject *module, PyObject *const *args, Py_ssize_t nargs) _PyArg_BadArgument("maxpp", "argument 1", "contiguous buffer", args[0]); goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } width = _PyLong_AsInt(args[1]); if (width == -1 && PyErr_Occurred()) { goto exit; @@ -555,11 +510,6 @@ audioop_cross(PyObject *module, PyObject *const *args, Py_ssize_t nargs) _PyArg_BadArgument("cross", "argument 1", "contiguous buffer", args[0]); goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } width = _PyLong_AsInt(args[1]); if (width == -1 && PyErr_Occurred()) { goto exit; @@ -606,11 +556,6 @@ audioop_mul(PyObject *module, PyObject *const *args, Py_ssize_t nargs) _PyArg_BadArgument("mul", "argument 1", "contiguous buffer", args[0]); goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } width = _PyLong_AsInt(args[1]); if (width == -1 && PyErr_Occurred()) { goto exit; @@ -668,11 +613,6 @@ audioop_tomono(PyObject *module, PyObject *const *args, Py_ssize_t nargs) _PyArg_BadArgument("tomono", "argument 1", "contiguous buffer", args[0]); goto exit; } - if (PyFloat_Check(args[1])) { - 
PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } width = _PyLong_AsInt(args[1]); if (width == -1 && PyErr_Occurred()) { goto exit; @@ -740,11 +680,6 @@ audioop_tostereo(PyObject *module, PyObject *const *args, Py_ssize_t nargs) _PyArg_BadArgument("tostereo", "argument 1", "contiguous buffer", args[0]); goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } width = _PyLong_AsInt(args[1]); if (width == -1 && PyErr_Occurred()) { goto exit; @@ -818,11 +753,6 @@ audioop_add(PyObject *module, PyObject *const *args, Py_ssize_t nargs) _PyArg_BadArgument("add", "argument 2", "contiguous buffer", args[1]); goto exit; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } width = _PyLong_AsInt(args[2]); if (width == -1 && PyErr_Occurred()) { goto exit; @@ -872,20 +802,10 @@ audioop_bias(PyObject *module, PyObject *const *args, Py_ssize_t nargs) _PyArg_BadArgument("bias", "argument 1", "contiguous buffer", args[0]); goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } width = _PyLong_AsInt(args[1]); if (width == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } bias = _PyLong_AsInt(args[2]); if (bias == -1 && PyErr_Occurred()) { goto exit; @@ -930,11 +850,6 @@ audioop_reverse(PyObject *module, PyObject *const *args, Py_ssize_t nargs) _PyArg_BadArgument("reverse", "argument 1", "contiguous buffer", args[0]); goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } width = _PyLong_AsInt(args[1]); if (width == -1 && PyErr_Occurred()) { goto exit; @@ -979,11 +894,6 @@ audioop_byteswap(PyObject 
*module, PyObject *const *args, Py_ssize_t nargs) _PyArg_BadArgument("byteswap", "argument 1", "contiguous buffer", args[0]); goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } width = _PyLong_AsInt(args[1]); if (width == -1 && PyErr_Occurred()) { goto exit; @@ -1030,20 +940,10 @@ audioop_lin2lin(PyObject *module, PyObject *const *args, Py_ssize_t nargs) _PyArg_BadArgument("lin2lin", "argument 1", "contiguous buffer", args[0]); goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } width = _PyLong_AsInt(args[1]); if (width == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } newwidth = _PyLong_AsInt(args[2]); if (newwidth == -1 && PyErr_Occurred()) { goto exit; @@ -1097,38 +997,18 @@ audioop_ratecv(PyObject *module, PyObject *const *args, Py_ssize_t nargs) _PyArg_BadArgument("ratecv", "argument 1", "contiguous buffer", args[0]); goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } width = _PyLong_AsInt(args[1]); if (width == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } nchannels = _PyLong_AsInt(args[2]); if (nchannels == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[3])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } inrate = _PyLong_AsInt(args[3]); if (inrate == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[4])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } outrate = _PyLong_AsInt(args[4]); if (outrate == -1 && PyErr_Occurred()) { 
goto exit; @@ -1137,11 +1017,6 @@ audioop_ratecv(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 7) { goto skip_optional; } - if (PyFloat_Check(args[6])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } weightA = _PyLong_AsInt(args[6]); if (weightA == -1 && PyErr_Occurred()) { goto exit; @@ -1149,11 +1024,6 @@ audioop_ratecv(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 8) { goto skip_optional; } - if (PyFloat_Check(args[7])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } weightB = _PyLong_AsInt(args[7]); if (weightB == -1 && PyErr_Occurred()) { goto exit; @@ -1199,11 +1069,6 @@ audioop_lin2ulaw(PyObject *module, PyObject *const *args, Py_ssize_t nargs) _PyArg_BadArgument("lin2ulaw", "argument 1", "contiguous buffer", args[0]); goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } width = _PyLong_AsInt(args[1]); if (width == -1 && PyErr_Occurred()) { goto exit; @@ -1248,11 +1113,6 @@ audioop_ulaw2lin(PyObject *module, PyObject *const *args, Py_ssize_t nargs) _PyArg_BadArgument("ulaw2lin", "argument 1", "contiguous buffer", args[0]); goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } width = _PyLong_AsInt(args[1]); if (width == -1 && PyErr_Occurred()) { goto exit; @@ -1297,11 +1157,6 @@ audioop_lin2alaw(PyObject *module, PyObject *const *args, Py_ssize_t nargs) _PyArg_BadArgument("lin2alaw", "argument 1", "contiguous buffer", args[0]); goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } width = _PyLong_AsInt(args[1]); if (width == -1 && PyErr_Occurred()) { goto exit; @@ -1346,11 +1201,6 @@ audioop_alaw2lin(PyObject *module, PyObject *const *args, Py_ssize_t 
nargs) _PyArg_BadArgument("alaw2lin", "argument 1", "contiguous buffer", args[0]); goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } width = _PyLong_AsInt(args[1]); if (width == -1 && PyErr_Occurred()) { goto exit; @@ -1397,11 +1247,6 @@ audioop_lin2adpcm(PyObject *module, PyObject *const *args, Py_ssize_t nargs) _PyArg_BadArgument("lin2adpcm", "argument 1", "contiguous buffer", args[0]); goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } width = _PyLong_AsInt(args[1]); if (width == -1 && PyErr_Occurred()) { goto exit; @@ -1449,11 +1294,6 @@ audioop_adpcm2lin(PyObject *module, PyObject *const *args, Py_ssize_t nargs) _PyArg_BadArgument("adpcm2lin", "argument 1", "contiguous buffer", args[0]); goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } width = _PyLong_AsInt(args[1]); if (width == -1 && PyErr_Occurred()) { goto exit; @@ -1469,4 +1309,4 @@ audioop_adpcm2lin(PyObject *module, PyObject *const *args, Py_ssize_t nargs) return return_value; } -/*[clinic end generated code: output=6b4f2c597f295abc input=a9049054013a1b77]*/ +/*[clinic end generated code: output=343e5ae478fc0359 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/binascii.c.h b/Modules/clinic/binascii.c.h index 4d02c72c472e1..ae1c4574325c5 100644 --- a/Modules/clinic/binascii.c.h +++ b/Modules/clinic/binascii.c.h @@ -70,11 +70,6 @@ binascii_b2a_uu(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj if (!noptargs) { goto skip_optional_kwonly; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } backtick = _PyLong_AsInt(args[1]); if (backtick == -1 && PyErr_Occurred()) { goto exit; @@ -159,11 +154,6 @@ binascii_b2a_base64(PyObject *module, 
PyObject *const *args, Py_ssize_t nargs, P if (!noptargs) { goto skip_optional_kwonly; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } newline = _PyLong_AsInt(args[1]); if (newline == -1 && PyErr_Occurred()) { goto exit; @@ -348,11 +338,6 @@ binascii_crc_hqx(PyObject *module, PyObject *const *args, Py_ssize_t nargs) _PyArg_BadArgument("crc_hqx", "argument 1", "contiguous buffer", args[0]); goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } crc = (unsigned int)PyLong_AsUnsignedLongMask(args[1]); if (crc == (unsigned int)-1 && PyErr_Occurred()) { goto exit; @@ -401,11 +386,6 @@ binascii_crc32(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 2) { goto skip_optional; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } crc = (unsigned int)PyLong_AsUnsignedLongMask(args[1]); if (crc == (unsigned int)-1 && PyErr_Occurred()) { goto exit; @@ -488,11 +468,6 @@ binascii_b2a_hex(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyOb goto skip_optional_pos; } } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } bytes_per_sep = _PyLong_AsInt(args[2]); if (bytes_per_sep == -1 && PyErr_Occurred()) { goto exit; @@ -563,11 +538,6 @@ binascii_hexlify(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyOb goto skip_optional_pos; } } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } bytes_per_sep = _PyLong_AsInt(args[2]); if (bytes_per_sep == -1 && PyErr_Occurred()) { goto exit; @@ -684,11 +654,6 @@ binascii_a2b_qp(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj if (!noptargs) { goto skip_optional_pos; } - if 
(PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } header = _PyLong_AsInt(args[1]); if (header == -1 && PyErr_Occurred()) { goto exit; @@ -749,11 +714,6 @@ binascii_b2a_qp(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj goto skip_optional_pos; } if (args[1]) { - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } quotetabs = _PyLong_AsInt(args[1]); if (quotetabs == -1 && PyErr_Occurred()) { goto exit; @@ -763,11 +723,6 @@ binascii_b2a_qp(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj } } if (args[2]) { - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } istext = _PyLong_AsInt(args[2]); if (istext == -1 && PyErr_Occurred()) { goto exit; @@ -776,11 +731,6 @@ binascii_b2a_qp(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj goto skip_optional_pos; } } - if (PyFloat_Check(args[3])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } header = _PyLong_AsInt(args[3]); if (header == -1 && PyErr_Occurred()) { goto exit; @@ -796,4 +746,4 @@ binascii_b2a_qp(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj return return_value; } -/*[clinic end generated code: output=a1e878d3963b615e input=a9049054013a1b77]*/ +/*[clinic end generated code: output=95a0178f30801b89 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/fcntlmodule.c.h b/Modules/clinic/fcntlmodule.c.h index 024a44cfbf8bc..c6bf45fa494f0 100644 --- a/Modules/clinic/fcntlmodule.c.h +++ b/Modules/clinic/fcntlmodule.c.h @@ -38,11 +38,6 @@ fcntl_fcntl(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!conv_descriptor(args[0], &fd)) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } code = 
_PyLong_AsInt(args[1]); if (code == -1 && PyErr_Occurred()) { goto exit; @@ -113,11 +108,6 @@ fcntl_ioctl(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!conv_descriptor(args[0], &fd)) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } code = (unsigned int)PyLong_AsUnsignedLongMask(args[1]); if (code == (unsigned int)-1 && PyErr_Occurred()) { goto exit; @@ -168,11 +158,6 @@ fcntl_flock(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!conv_descriptor(args[0], &fd)) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } code = _PyLong_AsInt(args[1]); if (code == -1 && PyErr_Occurred()) { goto exit; @@ -233,11 +218,6 @@ fcntl_lockf(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!conv_descriptor(args[0], &fd)) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } code = _PyLong_AsInt(args[1]); if (code == -1 && PyErr_Occurred()) { goto exit; @@ -253,11 +233,6 @@ fcntl_lockf(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 5) { goto skip_optional; } - if (PyFloat_Check(args[4])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } whence = _PyLong_AsInt(args[4]); if (whence == -1 && PyErr_Occurred()) { goto exit; @@ -268,4 +243,4 @@ fcntl_lockf(PyObject *module, PyObject *const *args, Py_ssize_t nargs) exit: return return_value; } -/*[clinic end generated code: output=e912d25e28362c52 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=91c2295402509595 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/gcmodule.c.h b/Modules/clinic/gcmodule.c.h index 72795c66bf728..30efc7e0c2cd7 100644 --- a/Modules/clinic/gcmodule.c.h +++ b/Modules/clinic/gcmodule.c.h @@ -102,11 +102,6 @@ 
gc_collect(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject * if (!noptargs) { goto skip_optional_pos; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } generation = _PyLong_AsInt(args[0]); if (generation == -1 && PyErr_Occurred()) { goto exit; @@ -151,11 +146,6 @@ gc_set_debug(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int flags; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } flags = _PyLong_AsInt(arg); if (flags == -1 && PyErr_Occurred()) { goto exit; @@ -382,4 +372,4 @@ gc_get_freeze_count(PyObject *module, PyObject *Py_UNUSED(ignored)) exit: return return_value; } -/*[clinic end generated code: output=bd6a8056989e2e69 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=61e15f3a549f3ab5 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/itertoolsmodule.c.h b/Modules/clinic/itertoolsmodule.c.h index 20594b0fed4c3..c6d6717f89e3c 100644 --- a/Modules/clinic/itertoolsmodule.c.h +++ b/Modules/clinic/itertoolsmodule.c.h @@ -170,11 +170,6 @@ itertools_tee(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 2) { goto skip_optional; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[1]); @@ -356,11 +351,6 @@ itertools_combinations(PyTypeObject *type, PyObject *args, PyObject *kwargs) goto exit; } iterable = fastargs[0]; - if (PyFloat_Check(fastargs[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(fastargs[1]); @@ -409,11 +399,6 @@ itertools_combinations_with_replacement(PyTypeObject *type, PyObject *args, PyOb goto exit; } iterable = fastargs[0]; - if (PyFloat_Check(fastargs[1])) { - 
PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(fastargs[1]); @@ -642,4 +627,4 @@ itertools_count(PyTypeObject *type, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=392c9706e79f6710 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=07211f86c4153050 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/posixmodule.c.h b/Modules/clinic/posixmodule.c.h index 41baa45573979..6f180abd25495 100644 --- a/Modules/clinic/posixmodule.c.h +++ b/Modules/clinic/posixmodule.c.h @@ -187,11 +187,6 @@ os_access(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *k if (!path_converter(args[0], &path)) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } mode = _PyLong_AsInt(args[1]); if (mode == -1 && PyErr_Occurred()) { goto exit; @@ -257,11 +252,6 @@ os_ttyname(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int fd; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(arg); if (fd == -1 && PyErr_Occurred()) { goto exit; @@ -431,11 +421,6 @@ os_chmod(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kw if (!path_converter(args[0], &path)) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } mode = _PyLong_AsInt(args[1]); if (mode == -1 && PyErr_Occurred()) { goto exit; @@ -495,20 +480,10 @@ os_fchmod(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *k if (!args) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(args[0]); if (fd == -1 && PyErr_Occurred()) { goto exit; } - 
if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } mode = _PyLong_AsInt(args[1]); if (mode == -1 && PyErr_Occurred()) { goto exit; @@ -555,11 +530,6 @@ os_lchmod(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *k if (!path_converter(args[0], &path)) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } mode = _PyLong_AsInt(args[1]); if (mode == -1 && PyErr_Occurred()) { goto exit; @@ -952,11 +922,6 @@ os_fchown(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *k if (!args) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(args[0]); if (fd == -1 && PyErr_Occurred()) { goto exit; @@ -1353,11 +1318,6 @@ os_mkdir(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kw goto skip_optional_pos; } if (args[1]) { - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } mode = _PyLong_AsInt(args[1]); if (mode == -1 && PyErr_Occurred()) { goto exit; @@ -1403,11 +1363,6 @@ os_nice(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int increment; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } increment = _PyLong_AsInt(arg); if (increment == -1 && PyErr_Occurred()) { goto exit; @@ -1448,20 +1403,10 @@ os_getpriority(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje if (!args) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } which = _PyLong_AsInt(args[0]); if (which == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer 
argument expected, got float" ); - goto exit; - } who = _PyLong_AsInt(args[1]); if (who == -1 && PyErr_Occurred()) { goto exit; @@ -1503,29 +1448,14 @@ os_setpriority(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje if (!args) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } which = _PyLong_AsInt(args[0]); if (which == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } who = _PyLong_AsInt(args[1]); if (who == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } priority = _PyLong_AsInt(args[2]); if (priority == -1 && PyErr_Occurred()) { goto exit; @@ -1828,11 +1758,6 @@ os_umask(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int mask; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } mask = _PyLong_AsInt(arg); if (mask == -1 && PyErr_Occurred()) { goto exit; @@ -2093,11 +2018,6 @@ os__exit(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kw if (!args) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } status = _PyLong_AsInt(args[0]); if (status == -1 && PyErr_Occurred()) { goto exit; @@ -2289,11 +2209,6 @@ os_posix_spawn(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje } } if (args[5]) { - if (PyFloat_Check(args[5])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } resetids = _PyLong_AsInt(args[5]); if (resetids == -1 && PyErr_Occurred()) { goto exit; @@ -2303,11 +2218,6 @@ os_posix_spawn(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje } } if 
(args[6]) { - if (PyFloat_Check(args[6])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } setsid = _PyLong_AsInt(args[6]); if (setsid == -1 && PyErr_Occurred()) { goto exit; @@ -2426,11 +2336,6 @@ os_posix_spawnp(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj } } if (args[5]) { - if (PyFloat_Check(args[5])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } resetids = _PyLong_AsInt(args[5]); if (resetids == -1 && PyErr_Occurred()) { goto exit; @@ -2440,11 +2345,6 @@ os_posix_spawnp(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj } } if (args[6]) { - if (PyFloat_Check(args[6])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } setsid = _PyLong_AsInt(args[6]); if (setsid == -1 && PyErr_Occurred()) { goto exit; @@ -2510,11 +2410,6 @@ os_spawnv(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("spawnv", nargs, 3, 3)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } mode = _PyLong_AsInt(args[0]); if (mode == -1 && PyErr_Occurred()) { goto exit; @@ -2570,11 +2465,6 @@ os_spawnve(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("spawnve", nargs, 4, 4)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } mode = _PyLong_AsInt(args[0]); if (mode == -1 && PyErr_Occurred()) { goto exit; @@ -2738,11 +2628,6 @@ os_sched_get_priority_max(PyObject *module, PyObject *const *args, Py_ssize_t na if (!args) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } policy = _PyLong_AsInt(args[0]); if (policy == -1 && PyErr_Occurred()) { goto exit; @@ -2782,11 +2667,6 @@ 
os_sched_get_priority_min(PyObject *module, PyObject *const *args, Py_ssize_t na if (!args) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } policy = _PyLong_AsInt(args[0]); if (policy == -1 && PyErr_Occurred()) { goto exit; @@ -3288,11 +3168,6 @@ os_getgrouplist(PyObject *module, PyObject *const *args, Py_ssize_t nargs) PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } basegid = _PyLong_AsInt(args[1]); if (basegid == -1 && PyErr_Occurred()) { goto exit; @@ -3411,11 +3286,6 @@ os_initgroups(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!PyUnicode_FSConverter(args[0], &oname)) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } gid = _PyLong_AsInt(args[1]); if (gid == -1 && PyErr_Occurred()) { goto exit; @@ -3709,11 +3579,6 @@ os_plock(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int op; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } op = _PyLong_AsInt(arg); if (op == -1 && PyErr_Occurred()) { goto exit; @@ -3969,11 +3834,6 @@ os_wait3(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kw if (!args) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } options = _PyLong_AsInt(args[0]); if (options == -1 && PyErr_Occurred()) { goto exit; @@ -4477,11 +4337,6 @@ os_tcgetpgrp(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int fd; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(arg); if (fd == -1 && 
PyErr_Occurred()) { goto exit; @@ -4565,11 +4420,6 @@ os_open(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwn if (!path_converter(args[0], &path)) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } flags = _PyLong_AsInt(args[1]); if (flags == -1 && PyErr_Occurred()) { goto exit; @@ -4578,11 +4428,6 @@ os_open(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwn goto skip_optional_pos; } if (args[2]) { - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } mode = _PyLong_AsInt(args[2]); if (mode == -1 && PyErr_Occurred()) { goto exit; @@ -4637,11 +4482,6 @@ os_close(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kw if (!args) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(args[0]); if (fd == -1 && PyErr_Occurred()) { goto exit; @@ -4674,20 +4514,10 @@ os_closerange(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("closerange", nargs, 2, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd_low = _PyLong_AsInt(args[0]); if (fd_low == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd_high = _PyLong_AsInt(args[1]); if (fd_high == -1 && PyErr_Occurred()) { goto exit; @@ -4717,11 +4547,6 @@ os_dup(PyObject *module, PyObject *arg) int fd; int _return_value; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(arg); if (fd == -1 && PyErr_Occurred()) { goto exit; @@ -4765,20 +4590,10 @@ 
os_dup2(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwn if (!args) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(args[0]); if (fd == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd2 = _PyLong_AsInt(args[1]); if (fd2 == -1 && PyErr_Occurred()) { goto exit; @@ -4833,20 +4648,10 @@ os_lockf(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("lockf", nargs, 3, 3)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(args[0]); if (fd == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } command = _PyLong_AsInt(args[1]); if (command == -1 && PyErr_Occurred()) { goto exit; @@ -4889,11 +4694,6 @@ os_lseek(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("lseek", nargs, 3, 3)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(args[0]); if (fd == -1 && PyErr_Occurred()) { goto exit; @@ -4901,11 +4701,6 @@ os_lseek(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!Py_off_t_converter(args[1], &position)) { goto exit; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } how = _PyLong_AsInt(args[2]); if (how == -1 && PyErr_Occurred()) { goto exit; @@ -4942,20 +4737,10 @@ os_read(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("read", nargs, 2, 2)) { goto exit; } - if 
(PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(args[0]); if (fd == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[1]); @@ -5007,11 +4792,6 @@ os_readv(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("readv", nargs, 2, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(args[0]); if (fd == -1 && PyErr_Occurred()) { goto exit; @@ -5057,20 +4837,10 @@ os_pread(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("pread", nargs, 3, 3)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(args[0]); if (fd == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[1]); @@ -5136,11 +4906,6 @@ os_preadv(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("preadv", nargs, 3, 4)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(args[0]); if (fd == -1 && PyErr_Occurred()) { goto exit; @@ -5152,11 +4917,6 @@ os_preadv(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 4) { goto skip_optional; } - if (PyFloat_Check(args[3])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } flags = _PyLong_AsInt(args[3]); if (flags == -1 && 
PyErr_Occurred()) { goto exit; @@ -5197,11 +4957,6 @@ os_write(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("write", nargs, 2, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(args[0]); if (fd == -1 && PyErr_Occurred()) { goto exit; @@ -5265,20 +5020,10 @@ os_sendfile(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject if (!args) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } out_fd = _PyLong_AsInt(args[0]); if (out_fd == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } in_fd = _PyLong_AsInt(args[1]); if (in_fd == -1 && PyErr_Occurred()) { goto exit; @@ -5304,11 +5049,6 @@ os_sendfile(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject goto skip_optional_pos; } } - if (PyFloat_Check(args[6])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } flags = _PyLong_AsInt(args[6]); if (flags == -1 && PyErr_Occurred()) { goto exit; @@ -5359,20 +5099,10 @@ os_sendfile(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject if (!args) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } out_fd = _PyLong_AsInt(args[0]); if (out_fd == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } in_fd = _PyLong_AsInt(args[1]); if (in_fd == -1 && PyErr_Occurred()) { goto exit; @@ -5380,11 +5110,6 @@ os_sendfile(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject if (!Py_off_t_converter(args[2], &offset)) { goto exit; 
} - if (PyFloat_Check(args[3])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[3]); @@ -5412,11 +5137,6 @@ os_sendfile(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject goto skip_optional_pos; } } - if (PyFloat_Check(args[6])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } flags = _PyLong_AsInt(args[6]); if (flags == -1 && PyErr_Occurred()) { goto exit; @@ -5461,30 +5181,15 @@ os_sendfile(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject if (!args) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } out_fd = _PyLong_AsInt(args[0]); if (out_fd == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } in_fd = _PyLong_AsInt(args[1]); if (in_fd == -1 && PyErr_Occurred()) { goto exit; } offobj = args[2]; - if (PyFloat_Check(args[3])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[3]); @@ -5530,29 +5235,14 @@ os__fcopyfile(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("_fcopyfile", nargs, 3, 3)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } in_fd = _PyLong_AsInt(args[0]); if (in_fd == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } out_fd = _PyLong_AsInt(args[1]); if (out_fd == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got 
float" ); - goto exit; - } flags = _PyLong_AsInt(args[2]); if (flags == -1 && PyErr_Occurred()) { goto exit; @@ -5593,11 +5283,6 @@ os_fstat(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kw if (!args) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(args[0]); if (fd == -1 && PyErr_Occurred()) { goto exit; @@ -5630,11 +5315,6 @@ os_isatty(PyObject *module, PyObject *arg) int fd; int _return_value; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(arg); if (fd == -1 && PyErr_Occurred()) { goto exit; @@ -5700,11 +5380,6 @@ os_pipe2(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int flags; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } flags = _PyLong_AsInt(arg); if (flags == -1 && PyErr_Occurred()) { goto exit; @@ -5745,11 +5420,6 @@ os_writev(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("writev", nargs, 2, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(args[0]); if (fd == -1 && PyErr_Occurred()) { goto exit; @@ -5797,11 +5467,6 @@ os_pwrite(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("pwrite", nargs, 3, 3)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(args[0]); if (fd == -1 && PyErr_Occurred()) { goto exit; @@ -5875,11 +5540,6 @@ os_pwritev(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("pwritev", nargs, 3, 4)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - 
"integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(args[0]); if (fd == -1 && PyErr_Occurred()) { goto exit; @@ -5891,11 +5551,6 @@ os_pwritev(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 4) { goto skip_optional; } - if (PyFloat_Check(args[3])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } flags = _PyLong_AsInt(args[3]); if (flags == -1 && PyErr_Occurred()) { goto exit; @@ -5961,29 +5616,14 @@ os_copy_file_range(PyObject *module, PyObject *const *args, Py_ssize_t nargs, Py if (!args) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } src = _PyLong_AsInt(args[0]); if (src == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } dst = _PyLong_AsInt(args[1]); if (dst == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[2]); @@ -6057,11 +5697,6 @@ os_mkfifo(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *k goto skip_optional_pos; } if (args[1]) { - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } mode = _PyLong_AsInt(args[1]); if (mode == -1 && PyErr_Occurred()) { goto exit; @@ -6140,11 +5775,6 @@ os_mknod(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kw goto skip_optional_pos; } if (args[1]) { - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } mode = _PyLong_AsInt(args[1]); if (mode == -1 && PyErr_Occurred()) { goto exit; @@ -6277,20 +5907,10 @@ os_makedev(PyObject *module, PyObject *const *args, 
Py_ssize_t nargs) if (!_PyArg_CheckPositional("makedev", nargs, 2, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } major = _PyLong_AsInt(args[0]); if (major == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } minor = _PyLong_AsInt(args[1]); if (minor == -1 && PyErr_Occurred()) { goto exit; @@ -6331,11 +5951,6 @@ os_ftruncate(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("ftruncate", nargs, 2, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(args[0]); if (fd == -1 && PyErr_Occurred()) { goto exit; @@ -6428,11 +6043,6 @@ os_posix_fallocate(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("posix_fallocate", nargs, 3, 3)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(args[0]); if (fd == -1 && PyErr_Occurred()) { goto exit; @@ -6486,11 +6096,6 @@ os_posix_fadvise(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("posix_fadvise", nargs, 4, 4)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(args[0]); if (fd == -1 && PyErr_Occurred()) { goto exit; @@ -6501,11 +6106,6 @@ os_posix_fadvise(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!Py_off_t_converter(args[2], &length)) { goto exit; } - if (PyFloat_Check(args[3])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } advice = _PyLong_AsInt(args[3]); if (advice == -1 && PyErr_Occurred()) { goto 
exit; @@ -6697,11 +6297,6 @@ os_strerror(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int code; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } code = _PyLong_AsInt(arg); if (code == -1 && PyErr_Occurred()) { goto exit; @@ -6733,11 +6328,6 @@ os_WCOREDUMP(PyObject *module, PyObject *arg) int status; int _return_value; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } status = _PyLong_AsInt(arg); if (status == -1 && PyErr_Occurred()) { goto exit; @@ -6785,11 +6375,6 @@ os_WIFCONTINUED(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj if (!args) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } status = _PyLong_AsInt(args[0]); if (status == -1 && PyErr_Occurred()) { goto exit; @@ -6834,11 +6419,6 @@ os_WIFSTOPPED(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObjec if (!args) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } status = _PyLong_AsInt(args[0]); if (status == -1 && PyErr_Occurred()) { goto exit; @@ -6883,11 +6463,6 @@ os_WIFSIGNALED(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje if (!args) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } status = _PyLong_AsInt(args[0]); if (status == -1 && PyErr_Occurred()) { goto exit; @@ -6932,11 +6507,6 @@ os_WIFEXITED(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject if (!args) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } status = _PyLong_AsInt(args[0]); if (status == -1 && PyErr_Occurred()) { goto exit; @@ -6981,11 
+6551,6 @@ os_WEXITSTATUS(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje if (!args) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } status = _PyLong_AsInt(args[0]); if (status == -1 && PyErr_Occurred()) { goto exit; @@ -7030,11 +6595,6 @@ os_WTERMSIG(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject if (!args) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } status = _PyLong_AsInt(args[0]); if (status == -1 && PyErr_Occurred()) { goto exit; @@ -7079,11 +6639,6 @@ os_WSTOPSIG(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject if (!args) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } status = _PyLong_AsInt(args[0]); if (status == -1 && PyErr_Occurred()) { goto exit; @@ -7122,11 +6677,6 @@ os_fstatvfs(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int fd; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(arg); if (fd == -1 && PyErr_Occurred()) { goto exit; @@ -7252,11 +6802,6 @@ os_fpathconf(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("fpathconf", nargs, 2, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(args[0]); if (fd == -1 && PyErr_Occurred()) { goto exit; @@ -7526,11 +7071,6 @@ os_device_encoding(PyObject *module, PyObject *const *args, Py_ssize_t nargs, Py if (!args) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(args[0]); if (fd == -1 && 
PyErr_Occurred()) { goto exit; @@ -7788,11 +7328,6 @@ os_setxattr(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject goto skip_optional_pos; } if (args[3]) { - if (PyFloat_Check(args[3])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } flags = _PyLong_AsInt(args[3]); if (flags == -1 && PyErr_Occurred()) { goto exit; @@ -7974,11 +7509,6 @@ os_urandom(PyObject *module, PyObject *arg) PyObject *return_value = NULL; Py_ssize_t size; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(arg); @@ -8031,11 +7561,6 @@ os_memfd_create(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj if (!noptargs) { goto skip_optional_pos; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } flags = (unsigned int)PyLong_AsUnsignedLongMask(args[1]); if (flags == (unsigned int)-1 && PyErr_Occurred()) { goto exit; @@ -8090,11 +7615,6 @@ os_get_terminal_size(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 1) { goto skip_optional; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(args[0]); if (fd == -1 && PyErr_Occurred()) { goto exit; @@ -8149,11 +7669,6 @@ os_get_inheritable(PyObject *module, PyObject *arg) int fd; int _return_value; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(arg); if (fd == -1 && PyErr_Occurred()) { goto exit; @@ -8190,20 +7705,10 @@ os_set_inheritable(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("set_inheritable", nargs, 2, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument 
expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(args[0]); if (fd == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } inheritable = _PyLong_AsInt(args[1]); if (inheritable == -1 && PyErr_Occurred()) { goto exit; @@ -8307,11 +7812,6 @@ os_get_blocking(PyObject *module, PyObject *arg) int fd; int _return_value; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(arg); if (fd == -1 && PyErr_Occurred()) { goto exit; @@ -8355,20 +7855,10 @@ os_set_blocking(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("set_blocking", nargs, 2, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(args[0]); if (fd == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } blocking = _PyLong_AsInt(args[1]); if (blocking == -1 && PyErr_Occurred()) { goto exit; @@ -8667,11 +8157,6 @@ os_getrandom(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject if (!args) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[0]); @@ -8687,11 +8172,6 @@ os_getrandom(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject if (!noptargs) { goto skip_optional_pos; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } flags = _PyLong_AsInt(args[1]); if (flags == -1 && PyErr_Occurred()) { goto exit; @@ -9396,4 +8876,4 @@ os_waitstatus_to_exitcode(PyObject *module, PyObject *const *args, 
Py_ssize_t na #ifndef OS_WAITSTATUS_TO_EXITCODE_METHODDEF #define OS_WAITSTATUS_TO_EXITCODE_METHODDEF #endif /* !defined(OS_WAITSTATUS_TO_EXITCODE_METHODDEF) */ -/*[clinic end generated code: output=005919eaaef3f8e6 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=b97bbc8cb5078540 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/pyexpat.c.h b/Modules/clinic/pyexpat.c.h index ee5907ca7e447..923ca6bfa4127 100644 --- a/Modules/clinic/pyexpat.c.h +++ b/Modules/clinic/pyexpat.c.h @@ -31,11 +31,6 @@ pyexpat_xmlparser_Parse(xmlparseobject *self, PyObject *const *args, Py_ssize_t if (nargs < 2) { goto skip_optional; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } isfinal = _PyLong_AsInt(args[1]); if (isfinal == -1 && PyErr_Occurred()) { goto exit; @@ -221,11 +216,6 @@ pyexpat_xmlparser_SetParamEntityParsing(xmlparseobject *self, PyObject *arg) PyObject *return_value = NULL; int flag; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } flag = _PyLong_AsInt(arg); if (flag == -1 && PyErr_Occurred()) { goto exit; @@ -384,11 +374,6 @@ pyexpat_ErrorString(PyObject *module, PyObject *arg) PyObject *return_value = NULL; long code; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } code = PyLong_AsLong(arg); if (code == -1 && PyErr_Occurred()) { goto exit; @@ -402,4 +387,4 @@ pyexpat_ErrorString(PyObject *module, PyObject *arg) #ifndef PYEXPAT_XMLPARSER_USEFOREIGNDTD_METHODDEF #define PYEXPAT_XMLPARSER_USEFOREIGNDTD_METHODDEF #endif /* !defined(PYEXPAT_XMLPARSER_USEFOREIGNDTD_METHODDEF) */ -/*[clinic end generated code: output=68ce25024280af41 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=14e37efc4ec10be2 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/resource.c.h b/Modules/clinic/resource.c.h index 
80efb714bb6b6..32c092ad7a94a 100644 --- a/Modules/clinic/resource.c.h +++ b/Modules/clinic/resource.c.h @@ -19,11 +19,6 @@ resource_getrusage(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int who; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } who = _PyLong_AsInt(arg); if (who == -1 && PyErr_Occurred()) { goto exit; @@ -51,11 +46,6 @@ resource_getrlimit(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int resource; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } resource = _PyLong_AsInt(arg); if (resource == -1 && PyErr_Occurred()) { goto exit; @@ -87,11 +77,6 @@ resource_setrlimit(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("setrlimit", nargs, 2, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } resource = _PyLong_AsInt(args[0]); if (resource == -1 && PyErr_Occurred()) { goto exit; @@ -178,4 +163,4 @@ resource_getpagesize(PyObject *module, PyObject *Py_UNUSED(ignored)) #ifndef RESOURCE_PRLIMIT_METHODDEF #define RESOURCE_PRLIMIT_METHODDEF #endif /* !defined(RESOURCE_PRLIMIT_METHODDEF) */ -/*[clinic end generated code: output=ef3034f291156a34 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=ad190fb33d647d1e input=a9049054013a1b77]*/ diff --git a/Modules/clinic/selectmodule.c.h b/Modules/clinic/selectmodule.c.h index 888054b29eba2..cd7f3846da695 100644 --- a/Modules/clinic/selectmodule.c.h +++ b/Modules/clinic/selectmodule.c.h @@ -528,11 +528,6 @@ select_epoll(PyTypeObject *type, PyObject *args, PyObject *kwargs) goto skip_optional_pos; } if (fastargs[0]) { - if (PyFloat_Check(fastargs[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } sizehint = _PyLong_AsInt(fastargs[0]); if 
(sizehint == -1 && PyErr_Occurred()) { goto exit; @@ -541,11 +536,6 @@ select_epoll(PyTypeObject *type, PyObject *args, PyObject *kwargs) goto skip_optional_pos; } } - if (PyFloat_Check(fastargs[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } flags = _PyLong_AsInt(fastargs[1]); if (flags == -1 && PyErr_Occurred()) { goto exit; @@ -625,11 +615,6 @@ select_epoll_fromfd(PyTypeObject *type, PyObject *arg) PyObject *return_value = NULL; int fd; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(arg); if (fd == -1 && PyErr_Occurred()) { goto exit; @@ -685,11 +670,6 @@ select_epoll_register(pyEpoll_Object *self, PyObject *const *args, Py_ssize_t na if (!noptargs) { goto skip_optional_pos; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } eventmask = (unsigned int)PyLong_AsUnsignedLongMask(args[1]); if (eventmask == (unsigned int)-1 && PyErr_Occurred()) { goto exit; @@ -740,11 +720,6 @@ select_epoll_modify(pyEpoll_Object *self, PyObject *const *args, Py_ssize_t narg if (!fildes_converter(args[0], &fd)) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } eventmask = (unsigned int)PyLong_AsUnsignedLongMask(args[1]); if (eventmask == (unsigned int)-1 && PyErr_Occurred()) { goto exit; @@ -846,11 +821,6 @@ select_epoll_poll(pyEpoll_Object *self, PyObject *const *args, Py_ssize_t nargs, goto skip_optional_pos; } } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } maxevents = _PyLong_AsInt(args[1]); if (maxevents == -1 && PyErr_Occurred()) { goto exit; @@ -1041,11 +1011,6 @@ select_kqueue_fromfd(PyTypeObject *type, PyObject *arg) PyObject *return_value = NULL; int fd; - if 
(PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(arg); if (fd == -1 && PyErr_Occurred()) { goto exit; @@ -1094,11 +1059,6 @@ select_kqueue_control(kqueue_queue_Object *self, PyObject *const *args, Py_ssize goto exit; } changelist = args[0]; - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } maxevents = _PyLong_AsInt(args[1]); if (maxevents == -1 && PyErr_Occurred()) { goto exit; @@ -1215,4 +1175,4 @@ select_kqueue_control(kqueue_queue_Object *self, PyObject *const *args, Py_ssize #ifndef SELECT_KQUEUE_CONTROL_METHODDEF #define SELECT_KQUEUE_CONTROL_METHODDEF #endif /* !defined(SELECT_KQUEUE_CONTROL_METHODDEF) */ -/*[clinic end generated code: output=029f23fbe000d7f7 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=a055330869acbd16 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/signalmodule.c.h b/Modules/clinic/signalmodule.c.h index 7f60e28a3a28a..33a278e488f94 100644 --- a/Modules/clinic/signalmodule.c.h +++ b/Modules/clinic/signalmodule.c.h @@ -23,11 +23,6 @@ signal_alarm(PyObject *module, PyObject *arg) int seconds; long _return_value; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } seconds = _PyLong_AsInt(arg); if (seconds == -1 && PyErr_Occurred()) { goto exit; @@ -84,11 +79,6 @@ signal_raise_signal(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int signalnum; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } signalnum = _PyLong_AsInt(arg); if (signalnum == -1 && PyErr_Occurred()) { goto exit; @@ -128,11 +118,6 @@ signal_signal(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("signal", nargs, 2, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - 
PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } signalnum = _PyLong_AsInt(args[0]); if (signalnum == -1 && PyErr_Occurred()) { goto exit; @@ -168,11 +153,6 @@ signal_getsignal(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int signalnum; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } signalnum = _PyLong_AsInt(arg); if (signalnum == -1 && PyErr_Occurred()) { goto exit; @@ -204,11 +184,6 @@ signal_strsignal(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int signalnum; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } signalnum = _PyLong_AsInt(arg); if (signalnum == -1 && PyErr_Occurred()) { goto exit; @@ -246,20 +221,10 @@ signal_siginterrupt(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("siginterrupt", nargs, 2, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } signalnum = _PyLong_AsInt(args[0]); if (signalnum == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } flag = _PyLong_AsInt(args[1]); if (flag == -1 && PyErr_Occurred()) { goto exit; @@ -303,11 +268,6 @@ signal_setitimer(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("setitimer", nargs, 2, 3)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } which = _PyLong_AsInt(args[0]); if (which == -1 && PyErr_Occurred()) { goto exit; @@ -346,11 +306,6 @@ signal_getitimer(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int which; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - 
"integer argument expected, got float" ); - goto exit; - } which = _PyLong_AsInt(arg); if (which == -1 && PyErr_Occurred()) { goto exit; @@ -387,11 +342,6 @@ signal_pthread_sigmask(PyObject *module, PyObject *const *args, Py_ssize_t nargs if (!_PyArg_CheckPositional("pthread_sigmask", nargs, 2, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } how = _PyLong_AsInt(args[0]); if (how == -1 && PyErr_Occurred()) { goto exit; @@ -594,11 +544,6 @@ signal_pthread_kill(PyObject *module, PyObject *const *args, Py_ssize_t nargs) goto exit; } thread_id = PyLong_AsUnsignedLongMask(args[0]); - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } signalnum = _PyLong_AsInt(args[1]); if (signalnum == -1 && PyErr_Occurred()) { goto exit; @@ -638,20 +583,10 @@ signal_pidfd_send_signal(PyObject *module, PyObject *const *args, Py_ssize_t nar if (!_PyArg_CheckPositional("pidfd_send_signal", nargs, 2, 4)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } pidfd = _PyLong_AsInt(args[0]); if (pidfd == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } signalnum = _PyLong_AsInt(args[1]); if (signalnum == -1 && PyErr_Occurred()) { goto exit; @@ -663,11 +598,6 @@ signal_pidfd_send_signal(PyObject *module, PyObject *const *args, Py_ssize_t nar if (nargs < 4) { goto skip_optional; } - if (PyFloat_Check(args[3])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } flags = _PyLong_AsInt(args[3]); if (flags == -1 && PyErr_Occurred()) { goto exit; @@ -732,4 +662,4 @@ signal_pidfd_send_signal(PyObject *module, PyObject *const *args, Py_ssize_t nar #ifndef 
SIGNAL_PIDFD_SEND_SIGNAL_METHODDEF #define SIGNAL_PIDFD_SEND_SIGNAL_METHODDEF #endif /* !defined(SIGNAL_PIDFD_SEND_SIGNAL_METHODDEF) */ -/*[clinic end generated code: output=b41b4b6bd9ad4da2 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=dff93c869101f043 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/zlibmodule.c.h b/Modules/clinic/zlibmodule.c.h index 77ea04a353bf1..2b72aeb8df56f 100644 --- a/Modules/clinic/zlibmodule.c.h +++ b/Modules/clinic/zlibmodule.c.h @@ -44,11 +44,6 @@ zlib_compress(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObjec if (!noptargs) { goto skip_optional_pos; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } level = _PyLong_AsInt(args[1]); if (level == -1 && PyErr_Occurred()) { goto exit; @@ -112,11 +107,6 @@ zlib_decompress(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj goto skip_optional_pos; } if (args[1]) { - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } wbits = _PyLong_AsInt(args[1]); if (wbits == -1 && PyErr_Occurred()) { goto exit; @@ -125,8 +115,17 @@ zlib_decompress(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj goto skip_optional_pos; } } - if (!ssize_t_converter(args[2], &bufsize)) { - goto exit; + { + Py_ssize_t ival = -1; + PyObject *iobj = PyNumber_Index(args[2]); + if (iobj != NULL) { + ival = PyLong_AsSsize_t(iobj); + Py_DECREF(iobj); + } + if (ival == -1 && PyErr_Occurred()) { + goto exit; + } + bufsize = ival; } skip_optional_pos: return_value = zlib_decompress_impl(module, &data, wbits, bufsize); @@ -200,11 +199,6 @@ zlib_compressobj(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyOb goto skip_optional_pos; } if (args[0]) { - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } level = _PyLong_AsInt(args[0]); 
if (level == -1 && PyErr_Occurred()) { goto exit; @@ -214,11 +208,6 @@ zlib_compressobj(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyOb } } if (args[1]) { - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } method = _PyLong_AsInt(args[1]); if (method == -1 && PyErr_Occurred()) { goto exit; @@ -228,11 +217,6 @@ zlib_compressobj(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyOb } } if (args[2]) { - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } wbits = _PyLong_AsInt(args[2]); if (wbits == -1 && PyErr_Occurred()) { goto exit; @@ -242,11 +226,6 @@ zlib_compressobj(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyOb } } if (args[3]) { - if (PyFloat_Check(args[3])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } memLevel = _PyLong_AsInt(args[3]); if (memLevel == -1 && PyErr_Occurred()) { goto exit; @@ -256,11 +235,6 @@ zlib_compressobj(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyOb } } if (args[4]) { - if (PyFloat_Check(args[4])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } strategy = _PyLong_AsInt(args[4]); if (strategy == -1 && PyErr_Occurred()) { goto exit; @@ -325,11 +299,6 @@ zlib_decompressobj(PyObject *module, PyObject *const *args, Py_ssize_t nargs, Py goto skip_optional_pos; } if (args[0]) { - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } wbits = _PyLong_AsInt(args[0]); if (wbits == -1 && PyErr_Occurred()) { goto exit; @@ -438,8 +407,17 @@ zlib_Decompress_decompress(compobject *self, PyObject *const *args, Py_ssize_t n if (!noptargs) { goto skip_optional_pos; } - if (!ssize_t_converter(args[1], &max_length)) { - goto exit; + { + Py_ssize_t ival = -1; + PyObject *iobj = 
PyNumber_Index(args[1]); + if (iobj != NULL) { + ival = PyLong_AsSsize_t(iobj); + Py_DECREF(iobj); + } + if (ival == -1 && PyErr_Occurred()) { + goto exit; + } + max_length = ival; } skip_optional_pos: return_value = zlib_Decompress_decompress_impl(self, &data, max_length); @@ -483,11 +461,6 @@ zlib_Compress_flush(compobject *self, PyObject *const *args, Py_ssize_t nargs) if (nargs < 1) { goto skip_optional; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } mode = _PyLong_AsInt(args[0]); if (mode == -1 && PyErr_Occurred()) { goto exit; @@ -636,8 +609,17 @@ zlib_Decompress_flush(compobject *self, PyObject *const *args, Py_ssize_t nargs) if (nargs < 1) { goto skip_optional; } - if (!ssize_t_converter(args[0], &length)) { - goto exit; + { + Py_ssize_t ival = -1; + PyObject *iobj = PyNumber_Index(args[0]); + if (iobj != NULL) { + ival = PyLong_AsSsize_t(iobj); + Py_DECREF(iobj); + } + if (ival == -1 && PyErr_Occurred()) { + goto exit; + } + length = ival; } skip_optional: return_value = zlib_Decompress_flush_impl(self, length); @@ -683,11 +665,6 @@ zlib_adler32(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 2) { goto skip_optional; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } value = (unsigned int)PyLong_AsUnsignedLongMask(args[1]); if (value == (unsigned int)-1 && PyErr_Occurred()) { goto exit; @@ -741,11 +718,6 @@ zlib_crc32(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 2) { goto skip_optional; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } value = (unsigned int)PyLong_AsUnsignedLongMask(args[1]); if (value == (unsigned int)-1 && PyErr_Occurred()) { goto exit; @@ -785,4 +757,4 @@ zlib_crc32(PyObject *module, PyObject *const *args, Py_ssize_t nargs) #ifndef 
ZLIB_DECOMPRESS___DEEPCOPY___METHODDEF #define ZLIB_DECOMPRESS___DEEPCOPY___METHODDEF #endif /* !defined(ZLIB_DECOMPRESS___DEEPCOPY___METHODDEF) */ -/*[clinic end generated code: output=faae38ef96b88b16 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=06b6438506aab0cb input=a9049054013a1b77]*/ diff --git a/Modules/grpmodule.c b/Modules/grpmodule.c index cdb3ae859b389..c4d16819e487a 100644 --- a/Modules/grpmodule.c +++ b/Modules/grpmodule.c @@ -111,30 +111,14 @@ static PyObject * grp_getgrgid_impl(PyObject *module, PyObject *id) /*[clinic end generated code: output=30797c289504a1ba input=15fa0e2ccf5cda25]*/ { - PyObject *py_int_id, *retval = NULL; + PyObject *retval = NULL; int nomem = 0; char *buf = NULL, *buf2 = NULL; gid_t gid; struct group *p; if (!_Py_Gid_Converter(id, &gid)) { - if (!PyErr_ExceptionMatches(PyExc_TypeError)) { - return NULL; - } - PyErr_Clear(); - if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, - "group id must be int, not %.200", - Py_TYPE(id)->tp_name) < 0) { - return NULL; - } - py_int_id = PyNumber_Long(id); - if (!py_int_id) - return NULL; - if (!_Py_Gid_Converter(py_int_id, &gid)) { - Py_DECREF(py_int_id); - return NULL; - } - Py_DECREF(py_int_id); + return NULL; } #ifdef HAVE_GETGRGID_R int status; diff --git a/Modules/mathmodule.c b/Modules/mathmodule.c index f1d59c09e6272..5b96631d633c1 100644 --- a/Modules/mathmodule.c +++ b/Modules/mathmodule.c @@ -2047,37 +2047,9 @@ math_factorial(PyObject *module, PyObject *arg) { long x, two_valuation; int overflow; - PyObject *result, *odd_part, *pyint_form; - - if (PyFloat_Check(arg)) { - if (PyErr_WarnEx(PyExc_DeprecationWarning, - "Using factorial() with floats is deprecated", - 1) < 0) - { - return NULL; - } - PyObject *lx; - double dx = PyFloat_AS_DOUBLE((PyFloatObject *)arg); - if (!(Py_IS_FINITE(dx) && dx == floor(dx))) { - PyErr_SetString(PyExc_ValueError, - "factorial() only accepts integral values"); - return NULL; - } - lx = PyLong_FromDouble(dx); - if (lx == NULL) - 
return NULL; - x = PyLong_AsLongAndOverflow(lx, &overflow); - Py_DECREF(lx); - } - else { - pyint_form = PyNumber_Index(arg); - if (pyint_form == NULL) { - return NULL; - } - x = PyLong_AsLongAndOverflow(pyint_form, &overflow); - Py_DECREF(pyint_form); - } + PyObject *result, *odd_part; + x = PyLong_AsLongAndOverflow(arg, &overflow); if (x == -1 && PyErr_Occurred()) { return NULL; } diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index ddff28354a7c1..59ac47de1a709 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -13903,11 +13903,6 @@ static PyObject * os_waitstatus_to_exitcode_impl(PyObject *module, PyObject *status_obj) /*[clinic end generated code: output=db50b1b0ba3c7153 input=7fe2d7fdaea3db42]*/ { - if (PyFloat_Check(status_obj)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - return NULL; - } #ifndef MS_WINDOWS int status = _PyLong_AsInt(status_obj); if (status == -1 && PyErr_Occurred()) { diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c index 580ac0af5aff5..92c246ebea76f 100644 --- a/Modules/socketmodule.c +++ b/Modules/socketmodule.c @@ -5196,13 +5196,6 @@ sock_initobj(PyObject *self, PyObject *args, PyObject *kwds) else #endif { - - if (PyFloat_Check(fdobj)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float"); - return -1; - } - fd = PyLong_AsSocket_t(fdobj); if (fd == (SOCKET_T)(-1) && PyErr_Occurred()) return -1; diff --git a/Modules/zlibmodule.c b/Modules/zlibmodule.c index fe27909ae8a75..fd3064952869b 100644 --- a/Modules/zlibmodule.c +++ b/Modules/zlibmodule.c @@ -287,37 +287,6 @@ zlib_compress_impl(PyObject *module, Py_buffer *data, int level) return NULL; } -/*[python input] - -class ssize_t_converter(CConverter): - type = 'Py_ssize_t' - converter = 'ssize_t_converter' - c_ignored_default = "0" - -[python start generated code]*/ -/*[python end generated code: output=da39a3ee5e6b4b0d input=5f34ba1b394cb8e7]*/ - -static int -ssize_t_converter(PyObject 
*obj, void *ptr) -{ - PyObject *long_obj; - Py_ssize_t val; - - /* XXX Should be replaced with PyNumber_AsSsize_t after the end of the - deprecation period. */ - long_obj = _PyLong_FromNbIndexOrNbInt(obj); - if (long_obj == NULL) { - return 0; - } - val = PyLong_AsSsize_t(long_obj); - Py_DECREF(long_obj); - if (val == -1 && PyErr_Occurred()) { - return 0; - } - *(Py_ssize_t *)ptr = val; - return 1; -} - /*[clinic input] zlib.decompress @@ -326,7 +295,7 @@ zlib.decompress / wbits: int(c_default="MAX_WBITS") = MAX_WBITS The window buffer size and container format. - bufsize: ssize_t(c_default="DEF_BUF_SIZE") = DEF_BUF_SIZE + bufsize: Py_ssize_t(c_default="DEF_BUF_SIZE") = DEF_BUF_SIZE The initial output buffer size. Returns a bytes object containing the uncompressed data. @@ -335,7 +304,7 @@ Returns a bytes object containing the uncompressed data. static PyObject * zlib_decompress_impl(PyObject *module, Py_buffer *data, int wbits, Py_ssize_t bufsize) -/*[clinic end generated code: output=77c7e35111dc8c42 input=21960936208e9a5b]*/ +/*[clinic end generated code: output=77c7e35111dc8c42 input=a9ac17beff1f893f]*/ { PyObject *RetVal = NULL; Byte *ibuf; @@ -756,7 +725,7 @@ zlib.Decompress.decompress data: Py_buffer The binary data to decompress. / - max_length: ssize_t = 0 + max_length: Py_ssize_t = 0 The maximum allowable length of the decompressed data. Unconsumed input data will be stored in the unconsumed_tail attribute. @@ -771,7 +740,7 @@ Call the flush() method to clear these buffers. 
static PyObject * zlib_Decompress_decompress_impl(compobject *self, Py_buffer *data, Py_ssize_t max_length) -/*[clinic end generated code: output=6e5173c74e710352 input=b85a212a012b770a]*/ +/*[clinic end generated code: output=6e5173c74e710352 input=0a95d05a3bceaeaa]*/ { int err = Z_OK; Py_ssize_t ibuflen, obuflen = DEF_BUF_SIZE, hard_limit; @@ -1113,7 +1082,7 @@ zlib_Decompress___deepcopy__(compobject *self, PyObject *memo) /*[clinic input] zlib.Decompress.flush - length: ssize_t(c_default="DEF_BUF_SIZE") = zlib.DEF_BUF_SIZE + length: Py_ssize_t(c_default="DEF_BUF_SIZE") = zlib.DEF_BUF_SIZE the initial size of the output buffer. / @@ -1122,7 +1091,7 @@ Return a bytes object containing any remaining decompressed data. static PyObject * zlib_Decompress_flush_impl(compobject *self, Py_ssize_t length) -/*[clinic end generated code: output=68c75ea127cbe654 input=aa4ec37f3aef4da0]*/ +/*[clinic end generated code: output=68c75ea127cbe654 input=427f2a05a8c2113a]*/ { int err, flush; Py_buffer data; diff --git a/Objects/abstract.c b/Objects/abstract.c index 5b85b014bd22e..e8198492c63e5 100644 --- a/Objects/abstract.c +++ b/Objects/abstract.c @@ -1426,14 +1426,32 @@ PyNumber_Long(PyObject *o) } m = Py_TYPE(o)->tp_as_number; if (m && m->nb_int) { /* This should include subclasses of int */ - result = _PyLong_FromNbInt(o); - if (result != NULL && !PyLong_CheckExact(result)) { - Py_SETREF(result, _PyLong_Copy((PyLongObject *)result)); + /* Convert using the nb_int slot, which should return something + of exact type int. */ + result = m->nb_int(o); + if (!result || PyLong_CheckExact(result)) + return result; + if (!PyLong_Check(result)) { + PyErr_Format(PyExc_TypeError, + "__int__ returned non-int (type %.200s)", + result->ob_type->tp_name); + Py_DECREF(result); + return NULL; + } + /* Issue #17576: warn if 'result' not of exact type int. */ + if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, + "__int__ returned non-int (type %.200s). 
" + "The ability to return an instance of a strict subclass of int " + "is deprecated, and may be removed in a future version of Python.", + result->ob_type->tp_name)) { + Py_DECREF(result); + return NULL; } + Py_SETREF(result, _PyLong_Copy((PyLongObject *)result)); return result; } if (m && m->nb_index) { - result = _PyLong_FromNbIndexOrNbInt(o); + result = PyNumber_Index(o); if (result != NULL && !PyLong_CheckExact(result)) { Py_SETREF(result, _PyLong_Copy((PyLongObject *)result)); } @@ -1452,8 +1470,7 @@ PyNumber_Long(PyObject *o) } /* __trunc__ is specified to return an Integral type, but int() needs to return an int. */ - m = Py_TYPE(result)->tp_as_number; - if (m == NULL || (m->nb_index == NULL && m->nb_int == NULL)) { + if (!PyIndex_Check(result)) { PyErr_Format( PyExc_TypeError, "__trunc__ returned non-Integral (type %.200s)", @@ -1461,7 +1478,7 @@ PyNumber_Long(PyObject *o) Py_DECREF(result); return NULL; } - Py_SETREF(result, _PyLong_FromNbIndexOrNbInt(result)); + Py_SETREF(result, PyNumber_Index(result)); if (result != NULL && !PyLong_CheckExact(result)) { Py_SETREF(result, _PyLong_Copy((PyLongObject *)result)); } diff --git a/Objects/clinic/bytearrayobject.c.h b/Objects/clinic/bytearrayobject.c.h index 35ba1ff3d576d..83b0d03c56908 100644 --- a/Objects/clinic/bytearrayobject.c.h +++ b/Objects/clinic/bytearrayobject.c.h @@ -268,11 +268,6 @@ bytearray_replace(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nar if (nargs < 3) { goto skip_optional; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[2]); @@ -346,11 +341,6 @@ bytearray_split(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs goto skip_optional_pos; } } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = 
PyNumber_Index(args[1]); @@ -450,11 +440,6 @@ bytearray_rsplit(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t narg goto skip_optional_pos; } } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[1]); @@ -519,11 +504,6 @@ bytearray_insert(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t narg if (!_PyArg_CheckPositional("insert", nargs, 2, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[0]); @@ -617,11 +597,6 @@ bytearray_pop(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) if (nargs < 1) { goto skip_optional; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[0]); @@ -896,11 +871,6 @@ bytearray_splitlines(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t if (!noptargs) { goto skip_optional_pos; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } keepends = _PyLong_AsInt(args[0]); if (keepends == -1 && PyErr_Occurred()) { goto exit; @@ -1000,11 +970,6 @@ bytearray_hex(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs, goto skip_optional_pos; } } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } bytes_per_sep = _PyLong_AsInt(args[1]); if (bytes_per_sep == -1 && PyErr_Occurred()) { goto exit; @@ -1058,11 +1023,6 @@ bytearray_reduce_ex(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t n if (nargs < 1) { goto skip_optional; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument 
expected, got float" ); - goto exit; - } proto = _PyLong_AsInt(args[0]); if (proto == -1 && PyErr_Occurred()) { goto exit; @@ -1091,4 +1051,4 @@ bytearray_sizeof(PyByteArrayObject *self, PyObject *Py_UNUSED(ignored)) { return bytearray_sizeof_impl(self); } -/*[clinic end generated code: output=b2919f76709e48dc input=a9049054013a1b77]*/ +/*[clinic end generated code: output=920748990279fb9d input=a9049054013a1b77]*/ diff --git a/Objects/clinic/bytesobject.c.h b/Objects/clinic/bytesobject.c.h index 063a3777b4907..c4a2d0c362611 100644 --- a/Objects/clinic/bytesobject.c.h +++ b/Objects/clinic/bytesobject.c.h @@ -46,11 +46,6 @@ bytes_split(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, PyObje goto skip_optional_pos; } } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[1]); @@ -202,11 +197,6 @@ bytes_rsplit(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, PyObj goto skip_optional_pos; } } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[1]); @@ -493,11 +483,6 @@ bytes_replace(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) if (nargs < 3) { goto skip_optional; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[2]); @@ -715,11 +700,6 @@ bytes_splitlines(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, P if (!noptargs) { goto skip_optional_pos; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } keepends = _PyLong_AsInt(args[0]); if (keepends == -1 && PyErr_Occurred()) { goto exit; @@ -819,11 +799,6 @@ bytes_hex(PyBytesObject 
*self, PyObject *const *args, Py_ssize_t nargs, PyObject goto skip_optional_pos; } } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } bytes_per_sep = _PyLong_AsInt(args[1]); if (bytes_per_sep == -1 && PyErr_Occurred()) { goto exit; @@ -834,4 +809,4 @@ bytes_hex(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject exit: return return_value; } -/*[clinic end generated code: output=220388917d7bf751 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=a0c31faea2671a8c input=a9049054013a1b77]*/ diff --git a/Objects/clinic/codeobject.c.h b/Objects/clinic/codeobject.c.h index 1dd82278cf3d5..aef505ffc3f61 100644 --- a/Objects/clinic/codeobject.c.h +++ b/Objects/clinic/codeobject.c.h @@ -59,11 +59,6 @@ code_replace(PyCodeObject *self, PyObject *const *args, Py_ssize_t nargs, PyObje goto skip_optional_kwonly; } if (args[0]) { - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } co_argcount = _PyLong_AsInt(args[0]); if (co_argcount == -1 && PyErr_Occurred()) { goto exit; @@ -73,11 +68,6 @@ code_replace(PyCodeObject *self, PyObject *const *args, Py_ssize_t nargs, PyObje } } if (args[1]) { - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } co_posonlyargcount = _PyLong_AsInt(args[1]); if (co_posonlyargcount == -1 && PyErr_Occurred()) { goto exit; @@ -87,11 +77,6 @@ code_replace(PyCodeObject *self, PyObject *const *args, Py_ssize_t nargs, PyObje } } if (args[2]) { - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } co_kwonlyargcount = _PyLong_AsInt(args[2]); if (co_kwonlyargcount == -1 && PyErr_Occurred()) { goto exit; @@ -101,11 +86,6 @@ code_replace(PyCodeObject *self, PyObject *const *args, Py_ssize_t nargs, PyObje } } if (args[3]) { - if 
(PyFloat_Check(args[3])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } co_nlocals = _PyLong_AsInt(args[3]); if (co_nlocals == -1 && PyErr_Occurred()) { goto exit; @@ -115,11 +95,6 @@ code_replace(PyCodeObject *self, PyObject *const *args, Py_ssize_t nargs, PyObje } } if (args[4]) { - if (PyFloat_Check(args[4])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } co_stacksize = _PyLong_AsInt(args[4]); if (co_stacksize == -1 && PyErr_Occurred()) { goto exit; @@ -129,11 +104,6 @@ code_replace(PyCodeObject *self, PyObject *const *args, Py_ssize_t nargs, PyObje } } if (args[5]) { - if (PyFloat_Check(args[5])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } co_flags = _PyLong_AsInt(args[5]); if (co_flags == -1 && PyErr_Occurred()) { goto exit; @@ -143,11 +113,6 @@ code_replace(PyCodeObject *self, PyObject *const *args, Py_ssize_t nargs, PyObje } } if (args[6]) { - if (PyFloat_Check(args[6])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } co_firstlineno = _PyLong_AsInt(args[6]); if (co_firstlineno == -1 && PyErr_Occurred()) { goto exit; @@ -253,4 +218,4 @@ code_replace(PyCodeObject *self, PyObject *const *args, Py_ssize_t nargs, PyObje exit: return return_value; } -/*[clinic end generated code: output=27fe34e82106b220 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=f9f23e912a3955b9 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/listobject.c.h b/Objects/clinic/listobject.c.h index ed137c95a8e10..82884a42b57df 100644 --- a/Objects/clinic/listobject.c.h +++ b/Objects/clinic/listobject.c.h @@ -24,11 +24,6 @@ list_insert(PyListObject *self, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("insert", nargs, 2, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - 
goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[0]); @@ -128,11 +123,6 @@ list_pop(PyListObject *self, PyObject *const *args, Py_ssize_t nargs) if (nargs < 1) { goto skip_optional; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[0]); @@ -196,11 +186,6 @@ list_sort(PyListObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject goto skip_optional_kwonly; } } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } reverse = _PyLong_AsInt(args[1]); if (reverse == -1 && PyErr_Occurred()) { goto exit; @@ -367,4 +352,4 @@ list___reversed__(PyListObject *self, PyObject *Py_UNUSED(ignored)) { return list___reversed___impl(self); } -/*[clinic end generated code: output=1ff61490c091d165 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=137db7b11196b581 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/longobject.c.h b/Objects/clinic/longobject.c.h index 27e8dfe935b6e..d3d5c1992b3ea 100644 --- a/Objects/clinic/longobject.c.h +++ b/Objects/clinic/longobject.c.h @@ -209,11 +209,6 @@ int_to_bytes(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject * if (!args) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[0]); @@ -313,4 +308,4 @@ int_from_bytes(PyTypeObject *type, PyObject *const *args, Py_ssize_t nargs, PyOb exit: return return_value; } -/*[clinic end generated code: output=77bc3b2615822cb8 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=46d40c8aa6d420b7 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/memoryobject.c.h b/Objects/clinic/memoryobject.c.h index 75ac2011261fa..8227f0edfbab4 100644 --- 
a/Objects/clinic/memoryobject.c.h +++ b/Objects/clinic/memoryobject.c.h @@ -56,11 +56,6 @@ memoryview_hex(PyMemoryViewObject *self, PyObject *const *args, Py_ssize_t nargs goto skip_optional_pos; } } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } bytes_per_sep = _PyLong_AsInt(args[1]); if (bytes_per_sep == -1 && PyErr_Occurred()) { goto exit; @@ -71,4 +66,4 @@ memoryview_hex(PyMemoryViewObject *self, PyObject *const *args, Py_ssize_t nargs exit: return return_value; } -/*[clinic end generated code: output=ee265a73f68b0077 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=91106ef704134b19 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/typeobject.c.h b/Objects/clinic/typeobject.c.h index 357eb44b12b8a..8c70d76d916db 100644 --- a/Objects/clinic/typeobject.c.h +++ b/Objects/clinic/typeobject.c.h @@ -166,11 +166,6 @@ object___reduce_ex__(PyObject *self, PyObject *arg) PyObject *return_value = NULL; int protocol; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } protocol = _PyLong_AsInt(arg); if (protocol == -1 && PyErr_Occurred()) { goto exit; @@ -248,4 +243,4 @@ object___dir__(PyObject *self, PyObject *Py_UNUSED(ignored)) { return object___dir___impl(self); } -/*[clinic end generated code: output=7a6d272d282308f3 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=b4fb62939b08baf9 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/unicodeobject.c.h b/Objects/clinic/unicodeobject.c.h index cf81df4af67b2..2d81730d687cd 100644 --- a/Objects/clinic/unicodeobject.c.h +++ b/Objects/clinic/unicodeobject.c.h @@ -86,11 +86,6 @@ unicode_center(PyObject *self, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("center", nargs, 1, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } 
{ Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[0]); @@ -224,11 +219,6 @@ unicode_expandtabs(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyOb if (!noptargs) { goto skip_optional_pos; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } tabsize = _PyLong_AsInt(args[0]); if (tabsize == -1 && PyErr_Occurred()) { goto exit; @@ -530,11 +520,6 @@ unicode_ljust(PyObject *self, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("ljust", nargs, 1, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[0]); @@ -730,11 +715,6 @@ unicode_replace(PyObject *self, PyObject *const *args, Py_ssize_t nargs) if (nargs < 3) { goto skip_optional; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[2]); @@ -849,11 +829,6 @@ unicode_rjust(PyObject *self, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("rjust", nargs, 1, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[0]); @@ -923,11 +898,6 @@ unicode_split(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject goto skip_optional_pos; } } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[1]); @@ -1025,11 +995,6 @@ unicode_rsplit(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject goto skip_optional_pos; } } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument 
expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[1]); @@ -1081,11 +1046,6 @@ unicode_splitlines(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyOb if (!noptargs) { goto skip_optional_pos; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } keepends = _PyLong_AsInt(args[0]); if (keepends == -1 && PyErr_Occurred()) { goto exit; @@ -1231,11 +1191,6 @@ unicode_zfill(PyObject *self, PyObject *arg) PyObject *return_value = NULL; Py_ssize_t width; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(arg); @@ -1303,4 +1258,4 @@ unicode_sizeof(PyObject *self, PyObject *Py_UNUSED(ignored)) { return unicode_sizeof_impl(self); } -/*[clinic end generated code: output=b91233f3722643be input=a9049054013a1b77]*/ +/*[clinic end generated code: output=ea1aff10c743be14 input=a9049054013a1b77]*/ diff --git a/Objects/longobject.c b/Objects/longobject.c index 0ff0e80cd4269..a409948e4aca4 100644 --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -119,123 +119,6 @@ long_normalize(PyLongObject *v) return v; } -/* _PyLong_FromNbInt: Convert the given object to a PyLongObject - using the nb_int slot, if available. Raise TypeError if either the - nb_int slot is not available or the result of the call to nb_int - returns something not of type int. -*/ -PyObject * -_PyLong_FromNbInt(PyObject *integral) -{ - PyNumberMethods *nb; - PyObject *result; - - /* Fast path for the case that we already have an int. 
*/ - if (PyLong_CheckExact(integral)) { - Py_INCREF(integral); - return integral; - } - - nb = Py_TYPE(integral)->tp_as_number; - if (nb == NULL || nb->nb_int == NULL) { - PyErr_Format(PyExc_TypeError, - "an integer is required (got type %.200s)", - Py_TYPE(integral)->tp_name); - return NULL; - } - - /* Convert using the nb_int slot, which should return something - of exact type int. */ - result = nb->nb_int(integral); - if (!result || PyLong_CheckExact(result)) - return result; - if (!PyLong_Check(result)) { - PyErr_Format(PyExc_TypeError, - "__int__ returned non-int (type %.200s)", - Py_TYPE(result)->tp_name); - Py_DECREF(result); - return NULL; - } - /* Issue #17576: warn if 'result' not of exact type int. */ - if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, - "__int__ returned non-int (type %.200s). " - "The ability to return an instance of a strict subclass of int " - "is deprecated, and may be removed in a future version of Python.", - Py_TYPE(result)->tp_name)) { - Py_DECREF(result); - return NULL; - } - return result; -} - -/* Convert the given object to a PyLongObject using the nb_index or - nb_int slots, if available (the latter is deprecated). - Raise TypeError if either nb_index and nb_int slots are not - available or the result of the call to nb_index or nb_int - returns something not of type int. - Should be replaced with PyNumber_Index after the end of the - deprecation period. -*/ -PyObject * -_PyLong_FromNbIndexOrNbInt(PyObject *integral) -{ - PyNumberMethods *nb; - PyObject *result; - - /* Fast path for the case that we already have an int. 
*/ - if (PyLong_CheckExact(integral)) { - Py_INCREF(integral); - return integral; - } - - nb = Py_TYPE(integral)->tp_as_number; - if (nb == NULL || (nb->nb_index == NULL && nb->nb_int == NULL)) { - PyErr_Format(PyExc_TypeError, - "an integer is required (got type %.200s)", - Py_TYPE(integral)->tp_name); - return NULL; - } - - if (nb->nb_index) { - /* Convert using the nb_index slot, which should return something - of exact type int. */ - result = nb->nb_index(integral); - if (!result || PyLong_CheckExact(result)) - return result; - if (!PyLong_Check(result)) { - PyErr_Format(PyExc_TypeError, - "__index__ returned non-int (type %.200s)", - Py_TYPE(result)->tp_name); - Py_DECREF(result); - return NULL; - } - /* Issue #17576: warn if 'result' not of exact type int. */ - if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, - "__index__ returned non-int (type %.200s). " - "The ability to return an instance of a strict subclass of int " - "is deprecated, and may be removed in a future version of Python.", - Py_TYPE(result)->tp_name)) - { - Py_DECREF(result); - return NULL; - } - return result; - } - - result = _PyLong_FromNbInt(integral); - if (result && PyErr_WarnFormat(PyExc_DeprecationWarning, 1, - "an integer is required (got type %.200s). " - "Implicit conversion to integers using __int__ is deprecated, " - "and may be removed in a future version of Python.", - Py_TYPE(integral)->tp_name)) - { - Py_DECREF(result); - return NULL; - } - return result; -} - - /* Allocate a new int object with size digits. Return NULL and set exception if we run out of memory. 
*/ @@ -511,7 +394,7 @@ PyLong_AsLongAndOverflow(PyObject *vv, int *overflow) v = (PyLongObject *)vv; } else { - v = (PyLongObject *)_PyLong_FromNbIndexOrNbInt(vv); + v = (PyLongObject *)PyNumber_Index(vv); if (v == NULL) return -1; do_decref = 1; @@ -791,7 +674,7 @@ PyLong_AsUnsignedLongMask(PyObject *op) return _PyLong_AsUnsignedLongMask(op); } - lo = (PyLongObject *)_PyLong_FromNbIndexOrNbInt(op); + lo = (PyLongObject *)PyNumber_Index(op); if (lo == NULL) return (unsigned long)-1; @@ -1249,7 +1132,7 @@ PyLong_AsLongLong(PyObject *vv) v = (PyLongObject *)vv; } else { - v = (PyLongObject *)_PyLong_FromNbIndexOrNbInt(vv); + v = (PyLongObject *)PyNumber_Index(vv); if (v == NULL) return -1; do_decref = 1; @@ -1364,7 +1247,7 @@ PyLong_AsUnsignedLongLongMask(PyObject *op) return _PyLong_AsUnsignedLongLongMask(op); } - lo = (PyLongObject *)_PyLong_FromNbIndexOrNbInt(op); + lo = (PyLongObject *)PyNumber_Index(op); if (lo == NULL) return (unsigned long long)-1; @@ -1404,7 +1287,7 @@ PyLong_AsLongLongAndOverflow(PyObject *vv, int *overflow) v = (PyLongObject *)vv; } else { - v = (PyLongObject *)_PyLong_FromNbIndexOrNbInt(vv); + v = (PyLongObject *)PyNumber_Index(vv); if (v == NULL) return -1; do_decref = 1; diff --git a/Objects/stringlib/clinic/transmogrify.h.h b/Objects/stringlib/clinic/transmogrify.h.h index 8a3a060f12bc9..8dd7e6b5bb9e8 100644 --- a/Objects/stringlib/clinic/transmogrify.h.h +++ b/Objects/stringlib/clinic/transmogrify.h.h @@ -33,11 +33,6 @@ stringlib_expandtabs(PyObject *self, PyObject *const *args, Py_ssize_t nargs, Py if (!noptargs) { goto skip_optional_pos; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } tabsize = _PyLong_AsInt(args[0]); if (tabsize == -1 && PyErr_Occurred()) { goto exit; @@ -73,11 +68,6 @@ stringlib_ljust(PyObject *self, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("ljust", nargs, 1, 2)) { goto exit; } - if (PyFloat_Check(args[0])) 
{ - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[0]); @@ -134,11 +124,6 @@ stringlib_rjust(PyObject *self, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("rjust", nargs, 1, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[0]); @@ -195,11 +180,6 @@ stringlib_center(PyObject *self, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("center", nargs, 1, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[0]); @@ -252,11 +232,6 @@ stringlib_zfill(PyObject *self, PyObject *arg) PyObject *return_value = NULL; Py_ssize_t width; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(arg); @@ -274,4 +249,4 @@ stringlib_zfill(PyObject *self, PyObject *arg) exit: return return_value; } -/*[clinic end generated code: output=15be047aef999b4e input=a9049054013a1b77]*/ +/*[clinic end generated code: output=cd5ecdbf1d9e849a input=a9049054013a1b77]*/ diff --git a/PC/clinic/msvcrtmodule.c.h b/PC/clinic/msvcrtmodule.c.h index 180c3e5fc54a7..9701e8a63be8c 100644 --- a/PC/clinic/msvcrtmodule.c.h +++ b/PC/clinic/msvcrtmodule.c.h @@ -53,29 +53,14 @@ msvcrt_locking(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("locking", nargs, 3, 3)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(args[0]); if (fd == -1 && PyErr_Occurred()) { goto exit; } - if 
(PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } mode = _PyLong_AsInt(args[1]); if (mode == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } nbytes = PyLong_AsLong(args[2]); if (nbytes == -1 && PyErr_Occurred()) { goto exit; @@ -114,20 +99,10 @@ msvcrt_setmode(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("setmode", nargs, 2, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(args[0]); if (fd == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } flags = _PyLong_AsInt(args[1]); if (flags == -1 && PyErr_Occurred()) { goto exit; @@ -201,11 +176,6 @@ msvcrt_get_osfhandle(PyObject *module, PyObject *arg) int fd; void *_return_value; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } fd = _PyLong_AsInt(arg); if (fd == -1 && PyErr_Occurred()) { goto exit; @@ -561,20 +531,10 @@ msvcrt_CrtSetReportMode(PyObject *module, PyObject *const *args, Py_ssize_t narg if (!_PyArg_CheckPositional("CrtSetReportMode", nargs, 2, 2)) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } type = _PyLong_AsInt(args[0]); if (type == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } mode = _PyLong_AsInt(args[1]); if (mode == -1 && PyErr_Occurred()) { goto exit; @@ -614,11 +574,6 @@ msvcrt_set_error_mode(PyObject *module, PyObject *arg) int mode; long _return_value; 
- if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } mode = _PyLong_AsInt(arg); if (mode == -1 && PyErr_Occurred()) { goto exit; @@ -653,11 +608,6 @@ msvcrt_SetErrorMode(PyObject *module, PyObject *arg) PyObject *return_value = NULL; unsigned int mode; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } mode = (unsigned int)PyLong_AsUnsignedLongMask(arg); if (mode == (unsigned int)-1 && PyErr_Occurred()) { goto exit; @@ -679,4 +629,4 @@ msvcrt_SetErrorMode(PyObject *module, PyObject *arg) #ifndef MSVCRT_SET_ERROR_MODE_METHODDEF #define MSVCRT_SET_ERROR_MODE_METHODDEF #endif /* !defined(MSVCRT_SET_ERROR_MODE_METHODDEF) */ -/*[clinic end generated code: output=7cc6ffaf64f268f7 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=ab3b5ce5c1447f0e input=a9049054013a1b77]*/ diff --git a/PC/clinic/winreg.c.h b/PC/clinic/winreg.c.h index b7af1855ac545..5f37fcda0a9ab 100644 --- a/PC/clinic/winreg.c.h +++ b/PC/clinic/winreg.c.h @@ -435,11 +435,6 @@ winreg_EnumKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!clinic_HKEY_converter(args[0], &key)) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } index = _PyLong_AsInt(args[1]); if (index == -1 && PyErr_Occurred()) { goto exit; @@ -493,11 +488,6 @@ winreg_EnumValue(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!clinic_HKEY_converter(args[0], &key)) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } index = _PyLong_AsInt(args[1]); if (index == -1 && PyErr_Occurred()) { goto exit; @@ -1121,4 +1111,4 @@ winreg_QueryReflectionKey(PyObject *module, PyObject *arg) exit: return return_value; } -/*[clinic end generated code: output=015afbbd690eb59d 
input=a9049054013a1b77]*/ +/*[clinic end generated code: output=f4f996d40d06f14c input=a9049054013a1b77]*/ diff --git a/PC/clinic/winsound.c.h b/PC/clinic/winsound.c.h index b37db4c6cbc49..c5458990baa7f 100644 --- a/PC/clinic/winsound.c.h +++ b/PC/clinic/winsound.c.h @@ -34,11 +34,6 @@ winsound_PlaySound(PyObject *module, PyObject *const *args, Py_ssize_t nargs, Py goto exit; } sound = args[0]; - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } flags = _PyLong_AsInt(args[1]); if (flags == -1 && PyErr_Occurred()) { goto exit; @@ -81,20 +76,10 @@ winsound_Beep(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObjec if (!args) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } frequency = _PyLong_AsInt(args[0]); if (frequency == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } duration = _PyLong_AsInt(args[1]); if (duration == -1 && PyErr_Occurred()) { goto exit; @@ -136,11 +121,6 @@ winsound_MessageBeep(PyObject *module, PyObject *const *args, Py_ssize_t nargs, if (!noptargs) { goto skip_optional_pos; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } type = _PyLong_AsInt(args[0]); if (type == -1 && PyErr_Occurred()) { goto exit; @@ -151,4 +131,4 @@ winsound_MessageBeep(PyObject *module, PyObject *const *args, Py_ssize_t nargs, exit: return return_value; } -/*[clinic end generated code: output=28d1cd033282723d input=a9049054013a1b77]*/ +/*[clinic end generated code: output=16b3c1a96861cd3a input=a9049054013a1b77]*/ diff --git a/Python/clinic/_warnings.c.h b/Python/clinic/_warnings.c.h index 67ab0e3d9de52..80ed2ae8ab123 100644 --- a/Python/clinic/_warnings.c.h +++ b/Python/clinic/_warnings.c.h @@ 
-43,11 +43,6 @@ warnings_warn(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObjec } } if (args[2]) { - if (PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } { Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index(args[2]); @@ -71,4 +66,4 @@ warnings_warn(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObjec exit: return return_value; } -/*[clinic end generated code: output=b7bb54c73b5433ec input=a9049054013a1b77]*/ +/*[clinic end generated code: output=484e5ffe94edf0f0 input=a9049054013a1b77]*/ diff --git a/Python/clinic/bltinmodule.c.h b/Python/clinic/bltinmodule.c.h index d15af1f7f377c..377afded9f8c5 100644 --- a/Python/clinic/bltinmodule.c.h +++ b/Python/clinic/bltinmodule.c.h @@ -134,11 +134,6 @@ builtin_chr(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int i; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } i = _PyLong_AsInt(arg); if (i == -1 && PyErr_Occurred()) { goto exit; @@ -216,11 +211,6 @@ builtin_compile(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj goto skip_optional_pos; } if (args[3]) { - if (PyFloat_Check(args[3])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } flags = _PyLong_AsInt(args[3]); if (flags == -1 && PyErr_Occurred()) { goto exit; @@ -230,11 +220,6 @@ builtin_compile(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj } } if (args[4]) { - if (PyFloat_Check(args[4])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } dont_inherit = _PyLong_AsInt(args[4]); if (dont_inherit == -1 && PyErr_Occurred()) { goto exit; @@ -244,11 +229,6 @@ builtin_compile(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj } } if (args[5]) { - if (PyFloat_Check(args[5])) { - PyErr_SetString(PyExc_TypeError, - "integer argument 
expected, got float" ); - goto exit; - } optimize = _PyLong_AsInt(args[5]); if (optimize == -1 && PyErr_Occurred()) { goto exit; @@ -261,11 +241,6 @@ builtin_compile(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj if (!noptargs) { goto skip_optional_kwonly; } - if (PyFloat_Check(args[6])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } feature_version = _PyLong_AsInt(args[6]); if (feature_version == -1 && PyErr_Occurred()) { goto exit; @@ -855,4 +830,4 @@ builtin_issubclass(PyObject *module, PyObject *const *args, Py_ssize_t nargs) exit: return return_value; } -/*[clinic end generated code: output=29686a89b739d600 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=780fd9712ec6a6db input=a9049054013a1b77]*/ diff --git a/Python/clinic/import.c.h b/Python/clinic/import.c.h index e4867f34d4ef1..4e013cc97d6b9 100644 --- a/Python/clinic/import.c.h +++ b/Python/clinic/import.c.h @@ -420,11 +420,6 @@ _imp_source_hash(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyOb if (!args) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } key = PyLong_AsLong(args[0]); if (key == -1 && PyErr_Occurred()) { goto exit; @@ -454,4 +449,4 @@ _imp_source_hash(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyOb #ifndef _IMP_EXEC_DYNAMIC_METHODDEF #define _IMP_EXEC_DYNAMIC_METHODDEF #endif /* !defined(_IMP_EXEC_DYNAMIC_METHODDEF) */ -/*[clinic end generated code: output=3dc495e9c64d944e input=a9049054013a1b77]*/ +/*[clinic end generated code: output=7c31c433af88af6b input=a9049054013a1b77]*/ diff --git a/Python/clinic/marshal.c.h b/Python/clinic/marshal.c.h index 05d4830c4ab31..f80d5ef31f29c 100644 --- a/Python/clinic/marshal.c.h +++ b/Python/clinic/marshal.c.h @@ -42,11 +42,6 @@ marshal_dump(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 3) { goto skip_optional; } - if 
(PyFloat_Check(args[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } version = _PyLong_AsInt(args[2]); if (version == -1 && PyErr_Occurred()) { goto exit; @@ -111,11 +106,6 @@ marshal_dumps(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 2) { goto skip_optional; } - if (PyFloat_Check(args[1])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } version = _PyLong_AsInt(args[1]); if (version == -1 && PyErr_Occurred()) { goto exit; @@ -165,4 +155,4 @@ marshal_loads(PyObject *module, PyObject *arg) return return_value; } -/*[clinic end generated code: output=a859dabe8b0afeb6 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=68b78f38bfe0c06d input=a9049054013a1b77]*/ diff --git a/Python/clinic/sysmodule.c.h b/Python/clinic/sysmodule.c.h index 4615ebaab5de2..c1a9a2d69f09f 100644 --- a/Python/clinic/sysmodule.c.h +++ b/Python/clinic/sysmodule.c.h @@ -372,11 +372,6 @@ sys_setrecursionlimit(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int new_limit; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } new_limit = _PyLong_AsInt(arg); if (new_limit == -1 && PyErr_Occurred()) { goto exit; @@ -417,11 +412,6 @@ sys_set_coroutine_origin_tracking_depth(PyObject *module, PyObject *const *args, if (!args) { goto exit; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } depth = _PyLong_AsInt(args[0]); if (depth == -1 && PyErr_Occurred()) { goto exit; @@ -590,11 +580,6 @@ sys_setdlopenflags(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int new_val; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } new_val = _PyLong_AsInt(arg); if (new_val == -1 && PyErr_Occurred()) { goto exit; @@ -650,11 +635,6 
@@ sys_mdebug(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int flag; - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } flag = _PyLong_AsInt(arg); if (flag == -1 && PyErr_Occurred()) { goto exit; @@ -790,11 +770,6 @@ sys__getframe(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (nargs < 1) { goto skip_optional; } - if (PyFloat_Check(args[0])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } depth = _PyLong_AsInt(args[0]); if (depth == -1 && PyErr_Occurred()) { goto exit; @@ -970,4 +945,4 @@ sys_getandroidapilevel(PyObject *module, PyObject *Py_UNUSED(ignored)) #ifndef SYS_GETANDROIDAPILEVEL_METHODDEF #define SYS_GETANDROIDAPILEVEL_METHODDEF #endif /* !defined(SYS_GETANDROIDAPILEVEL_METHODDEF) */ -/*[clinic end generated code: output=39eb34a01fb9a919 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=87baa3357293ea65 input=a9049054013a1b77]*/ diff --git a/Python/clinic/traceback.c.h b/Python/clinic/traceback.c.h index 04daf2a376698..404a0c416d346 100644 --- a/Python/clinic/traceback.c.h +++ b/Python/clinic/traceback.c.h @@ -36,20 +36,10 @@ tb_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) goto exit; } tb_frame = (PyFrameObject *)fastargs[1]; - if (PyFloat_Check(fastargs[2])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } tb_lasti = _PyLong_AsInt(fastargs[2]); if (tb_lasti == -1 && PyErr_Occurred()) { goto exit; } - if (PyFloat_Check(fastargs[3])) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - } tb_lineno = _PyLong_AsInt(fastargs[3]); if (tb_lineno == -1 && PyErr_Occurred()) { goto exit; @@ -59,4 +49,4 @@ tb_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=3def6c06248feed8 input=a9049054013a1b77]*/ +/*[clinic end 
generated code: output=403778d7af5ebef9 input=a9049054013a1b77]*/ diff --git a/Python/getargs.c b/Python/getargs.c index 774242828e45e..63afae25494cd 100644 --- a/Python/getargs.c +++ b/Python/getargs.c @@ -643,22 +643,6 @@ converterr(const char *expected, PyObject *arg, char *msgbuf, size_t bufsize) #define CONV_UNICODE "(unicode conversion error)" -/* Explicitly check for float arguments when integers are expected. - Return 1 for error, 0 if ok. - XXX Should be removed after the end of the deprecation period in - _PyLong_FromNbIndexOrNbInt. */ -static int -float_argument_error(PyObject *arg) -{ - if (PyFloat_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - return 1; - } - else - return 0; -} - /* Convert a non-tuple argument. Return NULL if conversion went OK, or a string with a message describing the failure. The message is formatted as "must be , not ". @@ -704,10 +688,7 @@ convertsimple(PyObject *arg, const char **p_format, va_list *p_va, int flags, case 'b': { /* unsigned byte -- very short int */ char *p = va_arg(*p_va, char *); - long ival; - if (float_argument_error(arg)) - RETURN_ERR_OCCURRED; - ival = PyLong_AsLong(arg); + long ival = PyLong_AsLong(arg); if (ival == -1 && PyErr_Occurred()) RETURN_ERR_OCCURRED; else if (ival < 0) { @@ -728,11 +709,8 @@ convertsimple(PyObject *arg, const char **p_format, va_list *p_va, int flags, case 'B': {/* byte sized bitfield - both signed and unsigned values allowed */ char *p = va_arg(*p_va, char *); - long ival; - if (float_argument_error(arg)) - RETURN_ERR_OCCURRED; - ival = PyLong_AsUnsignedLongMask(arg); - if (ival == -1 && PyErr_Occurred()) + unsigned long ival = PyLong_AsUnsignedLongMask(arg); + if (ival == (unsigned long)-1 && PyErr_Occurred()) RETURN_ERR_OCCURRED; else *p = (unsigned char) ival; @@ -741,10 +719,7 @@ convertsimple(PyObject *arg, const char **p_format, va_list *p_va, int flags, case 'h': {/* signed short int */ short *p = va_arg(*p_va, short *); - 
long ival; - if (float_argument_error(arg)) - RETURN_ERR_OCCURRED; - ival = PyLong_AsLong(arg); + long ival = PyLong_AsLong(arg); if (ival == -1 && PyErr_Occurred()) RETURN_ERR_OCCURRED; else if (ival < SHRT_MIN) { @@ -765,11 +740,8 @@ convertsimple(PyObject *arg, const char **p_format, va_list *p_va, int flags, case 'H': { /* short int sized bitfield, both signed and unsigned allowed */ unsigned short *p = va_arg(*p_va, unsigned short *); - long ival; - if (float_argument_error(arg)) - RETURN_ERR_OCCURRED; - ival = PyLong_AsUnsignedLongMask(arg); - if (ival == -1 && PyErr_Occurred()) + unsigned long ival = PyLong_AsUnsignedLongMask(arg); + if (ival == (unsigned long)-1 && PyErr_Occurred()) RETURN_ERR_OCCURRED; else *p = (unsigned short) ival; @@ -778,10 +750,7 @@ convertsimple(PyObject *arg, const char **p_format, va_list *p_va, int flags, case 'i': {/* signed int */ int *p = va_arg(*p_va, int *); - long ival; - if (float_argument_error(arg)) - RETURN_ERR_OCCURRED; - ival = PyLong_AsLong(arg); + long ival = PyLong_AsLong(arg); if (ival == -1 && PyErr_Occurred()) RETURN_ERR_OCCURRED; else if (ival > INT_MAX) { @@ -802,14 +771,11 @@ convertsimple(PyObject *arg, const char **p_format, va_list *p_va, int flags, case 'I': { /* int sized bitfield, both signed and unsigned allowed */ unsigned int *p = va_arg(*p_va, unsigned int *); - unsigned int ival; - if (float_argument_error(arg)) - RETURN_ERR_OCCURRED; - ival = (unsigned int)PyLong_AsUnsignedLongMask(arg); - if (ival == (unsigned int)-1 && PyErr_Occurred()) + unsigned long ival = PyLong_AsUnsignedLongMask(arg); + if (ival == (unsigned long)-1 && PyErr_Occurred()) RETURN_ERR_OCCURRED; else - *p = ival; + *p = (unsigned int) ival; break; } @@ -818,8 +784,6 @@ convertsimple(PyObject *arg, const char **p_format, va_list *p_va, int flags, PyObject *iobj; Py_ssize_t *p = va_arg(*p_va, Py_ssize_t *); Py_ssize_t ival = -1; - if (float_argument_error(arg)) - RETURN_ERR_OCCURRED; iobj = PyNumber_Index(arg); if (iobj != NULL) 
{ ival = PyLong_AsSsize_t(iobj); @@ -832,10 +796,7 @@ convertsimple(PyObject *arg, const char **p_format, va_list *p_va, int flags, } case 'l': {/* long int */ long *p = va_arg(*p_va, long *); - long ival; - if (float_argument_error(arg)) - RETURN_ERR_OCCURRED; - ival = PyLong_AsLong(arg); + long ival = PyLong_AsLong(arg); if (ival == -1 && PyErr_Occurred()) RETURN_ERR_OCCURRED; else @@ -856,10 +817,7 @@ convertsimple(PyObject *arg, const char **p_format, va_list *p_va, int flags, case 'L': {/* long long */ long long *p = va_arg( *p_va, long long * ); - long long ival; - if (float_argument_error(arg)) - RETURN_ERR_OCCURRED; - ival = PyLong_AsLongLong(arg); + long long ival = PyLong_AsLongLong(arg); if (ival == (long long)-1 && PyErr_Occurred()) RETURN_ERR_OCCURRED; else diff --git a/Tools/clinic/clinic.py b/Tools/clinic/clinic.py index b07ffdd928f15..0f40e0679f007 100755 --- a/Tools/clinic/clinic.py +++ b/Tools/clinic/clinic.py @@ -2736,11 +2736,6 @@ def parse_arg(self, argname, displayname): # XXX PyFloat_Check can be removed after the end of the # deprecation in _PyLong_FromNbIndexOrNbInt. 
return """ - if (PyFloat_Check({argname})) {{{{ - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - }}}} {paramname} = _PyLong_AsInt({argname}); if ({paramname} == -1 && PyErr_Occurred()) {{{{ goto exit; @@ -2821,11 +2816,6 @@ def converter_init(self, *, bitwise=False): def parse_arg(self, argname, displayname): if self.format_unit == 'b': return """ - if (PyFloat_Check({argname})) {{{{ - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - }}}} {{{{ long ival = PyLong_AsLong({argname}); if (ival == -1 && PyErr_Occurred()) {{{{ @@ -2848,14 +2838,9 @@ def parse_arg(self, argname, displayname): """.format(argname=argname, paramname=self.name) elif self.format_unit == 'B': return """ - if (PyFloat_Check({argname})) {{{{ - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - }}}} {{{{ - long ival = PyLong_AsUnsignedLongMask({argname}); - if (ival == -1 && PyErr_Occurred()) {{{{ + unsigned long ival = PyLong_AsUnsignedLongMask({argname}); + if (ival == (unsigned long)-1 && PyErr_Occurred()) {{{{ goto exit; }}}} else {{{{ @@ -2876,11 +2861,6 @@ class short_converter(CConverter): def parse_arg(self, argname, displayname): if self.format_unit == 'h': return """ - if (PyFloat_Check({argname})) {{{{ - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - }}}} {{{{ long ival = PyLong_AsLong({argname}); if (ival == -1 && PyErr_Occurred()) {{{{ @@ -2917,11 +2897,6 @@ def converter_init(self, *, bitwise=False): def parse_arg(self, argname, displayname): if self.format_unit == 'H': return """ - if (PyFloat_Check({argname})) {{{{ - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - }}}} {paramname} = (unsigned short)PyLong_AsUnsignedLongMask({argname}); if ({paramname} == (unsigned short)-1 && PyErr_Occurred()) {{{{ goto exit; @@ -2947,11 +2922,6 @@ def converter_init(self, *, 
accept={int}, type=None): def parse_arg(self, argname, displayname): if self.format_unit == 'i': return """ - if (PyFloat_Check({argname})) {{{{ - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - }}}} {paramname} = _PyLong_AsInt({argname}); if ({paramname} == -1 && PyErr_Occurred()) {{{{ goto exit; @@ -2989,11 +2959,6 @@ def converter_init(self, *, bitwise=False): def parse_arg(self, argname, displayname): if self.format_unit == 'I': return """ - if (PyFloat_Check({argname})) {{{{ - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - }}}} {paramname} = (unsigned int)PyLong_AsUnsignedLongMask({argname}); if ({paramname} == (unsigned int)-1 && PyErr_Occurred()) {{{{ goto exit; @@ -3010,11 +2975,6 @@ class long_converter(CConverter): def parse_arg(self, argname, displayname): if self.format_unit == 'l': return """ - if (PyFloat_Check({argname})) {{{{ - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - }}}} {paramname} = PyLong_AsLong({argname}); if ({paramname} == -1 && PyErr_Occurred()) {{{{ goto exit; @@ -3054,11 +3014,6 @@ class long_long_converter(CConverter): def parse_arg(self, argname, displayname): if self.format_unit == 'L': return """ - if (PyFloat_Check({argname})) {{{{ - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - }}}} {paramname} = PyLong_AsLongLong({argname}); if ({paramname} == -1 && PyErr_Occurred()) {{{{ goto exit; @@ -3105,11 +3060,6 @@ def converter_init(self, *, accept={int}): def parse_arg(self, argname, displayname): if self.format_unit == 'n': return """ - if (PyFloat_Check({argname})) {{{{ - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float" ); - goto exit; - }}}} {{{{ Py_ssize_t ival = -1; PyObject *iobj = PyNumber_Index({argname}); From webhook-mailer at python.org Tue May 26 13:58:52 2020 From: webhook-mailer at python.org (Guido van Rossum) Date: 
Tue, 26 May 2020 17:58:52 -0000 Subject: [Python-checkins] Add soft keywords (GH-20370) Message-ID: https://github.com/python/cpython/commit/b45af1a5691e83b86321fc52d173f66cf891ce5f commit: b45af1a5691e83b86321fc52d173f66cf891ce5f branch: master author: Guido van Rossum committer: GitHub date: 2020-05-26T10:58:44-07:00 summary: Add soft keywords (GH-20370) These are like keywords but they only work in context; they are not reserved except when there is an exact match. This would enable things like match statements without reserving `match` (which would be bad for the `re.match()` function and probably lots of other places). Automerge-Triggered-By: @gvanrossum files: M Lib/test/test_peg_generator/test_c_parser.py M Parser/pegen/pegen.c M Parser/pegen/pegen.h M Tools/peg_generator/pegen/c_generator.py diff --git a/Lib/test/test_peg_generator/test_c_parser.py b/Lib/test/test_peg_generator/test_c_parser.py index f66b92def9f6c..72383d5b5a631 100644 --- a/Lib/test/test_peg_generator/test_c_parser.py +++ b/Lib/test/test_peg_generator/test_c_parser.py @@ -402,3 +402,33 @@ def test_error_in_rules(self) -> None: parse.parse_string("a", mode=0) """ self.run_test(grammar_source, test_source) + + def test_no_soft_keywords(self) -> None: + grammar_source = """ + start: expr+ NEWLINE? ENDMARKER + expr: 'foo' + """ + grammar = parse_string(grammar_source, GrammarParser) + parser_source = generate_c_parser_source(grammar) + assert "expect_soft_keyword" not in parser_source + + def test_soft_keywords(self) -> None: + grammar_source = """ + start: expr+ NEWLINE? 
ENDMARKER + expr: "foo" + """ + grammar = parse_string(grammar_source, GrammarParser) + parser_source = generate_c_parser_source(grammar) + assert "expect_soft_keyword" in parser_source + + def test_soft_keywords_parse(self) -> None: + grammar_source = """ + start: "if" expr '+' expr NEWLINE + expr: NAME + """ + test_source = """ + valid_cases = ["if if + if"] + invalid_cases = ["if if"] + self.check_input_strings_for_grammar(valid_cases, invalid_cases) + """ + self.run_test(grammar_source, test_source) diff --git a/Parser/pegen/pegen.c b/Parser/pegen/pegen.c index cd87a9ffd9365..ee30c2c0688f8 100644 --- a/Parser/pegen/pegen.c +++ b/Parser/pegen/pegen.c @@ -753,6 +753,30 @@ _PyPegen_expect_token(Parser *p, int type) return t; } +expr_ty +_PyPegen_expect_soft_keyword(Parser *p, const char *keyword) +{ + if (p->mark == p->fill) { + if (_PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + return NULL; + } + } + Token *t = p->tokens[p->mark]; + if (t->type != NAME) { + return NULL; + } + char* s = PyBytes_AsString(t->bytes); + if (!s) { + return NULL; + } + if (strcmp(s, keyword) != 0) { + return NULL; + } + expr_ty res = _PyPegen_name_token(p); + return res; +} + Token * _PyPegen_get_last_nonnwhitespace_token(Parser *p) { diff --git a/Parser/pegen/pegen.h b/Parser/pegen/pegen.h index bd3056e6f2b80..9507d9955ae32 100644 --- a/Parser/pegen/pegen.h +++ b/Parser/pegen/pegen.h @@ -122,6 +122,7 @@ int _PyPegen_lookahead_with_int(int, Token *(func)(Parser *, int), Parser *, int int _PyPegen_lookahead(int, void *(func)(Parser *), Parser *); Token *_PyPegen_expect_token(Parser *p, int type); +expr_ty _PyPegen_expect_soft_keyword(Parser *p, const char *keyword); Token *_PyPegen_get_last_nonnwhitespace_token(Parser *); int _PyPegen_fill_token(Parser *p); expr_ty _PyPegen_name_token(Parser *p); diff --git a/Tools/peg_generator/pegen/c_generator.py b/Tools/peg_generator/pegen/c_generator.py index 8bc23911bbbc2..885ff05858f67 100644 --- 
a/Tools/peg_generator/pegen/c_generator.py +++ b/Tools/peg_generator/pegen/c_generator.py @@ -117,6 +117,16 @@ def keyword_helper(self, keyword: str) -> FunctionCall: comment=f"token='{keyword}'", ) + def soft_keyword_helper(self, value: str) -> FunctionCall: + return FunctionCall( + assigned_variable="_keyword", + function="_PyPegen_expect_soft_keyword", + arguments=["p", value], + return_type="expr_ty", + nodetype=NodeTypes.NAME_TOKEN, + comment=f"soft_keyword='{value}'", + ) + def visit_NameLeaf(self, node: NameLeaf) -> FunctionCall: name = node.value if name in self.non_exact_tokens: @@ -154,7 +164,10 @@ def visit_NameLeaf(self, node: NameLeaf) -> FunctionCall: def visit_StringLeaf(self, node: StringLeaf) -> FunctionCall: val = ast.literal_eval(node.value) if re.match(r"[a-zA-Z_]\w*\Z", val): # This is a keyword - return self.keyword_helper(val) + if node.value.endswith("'"): + return self.keyword_helper(val) + else: + return self.soft_keyword_helper(node.value) else: assert val in self.exact_tokens, f"{node.value} is not a known literal" type = self.exact_tokens[val] @@ -656,8 +669,9 @@ def handle_alt_normal(self, node: Alt, is_gather: bool, rulename: Optional[str]) self.print("{") # We have parsed successfully all the conditions for the option. 
with self.indent(): + node_str = str(node).replace('"', '\\"') self.print( - f'D(fprintf(stderr, "%*c+ {rulename}[%d-%d]: %s succeeded!\\n", p->level, \' \', _mark, p->mark, "{node}"));' + f'D(fprintf(stderr, "%*c+ {rulename}[%d-%d]: %s succeeded!\\n", p->level, \' \', _mark, p->mark, "{node_str}"));' ) # Prepare to emmit the rule action and do so if node.action and "EXTRA" in node.action: @@ -710,8 +724,9 @@ def visit_Alt( self.print(f"{{ // {node}") with self.indent(): self._check_for_errors() + node_str = str(node).replace('"', '\\"') self.print( - f'D(fprintf(stderr, "%*c> {rulename}[%d-%d]: %s\\n", p->level, \' \', _mark, p->mark, "{node}"));' + f'D(fprintf(stderr, "%*c> {rulename}[%d-%d]: %s\\n", p->level, \' \', _mark, p->mark, "{node_str}"));' ) # Prepare variable declarations for the alternative vars = self.collect_vars(node) @@ -733,9 +748,10 @@ def visit_Alt( self.handle_alt_normal(node, is_gather, rulename) self.print("p->mark = _mark;") + node_str = str(node).replace('"', '\\"') self.print( f"D(fprintf(stderr, \"%*c%s {rulename}[%d-%d]: %s failed!\\n\", p->level, ' ',\n" - f' p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "{node}"));' + f' p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "{node_str}"));' ) if "_cut_var" in vars: self.print("if (_cut_var) {") From webhook-mailer at python.org Tue May 26 17:42:28 2020 From: webhook-mailer at python.org (Ethan Steinberg) Date: Tue, 26 May 2020 21:42:28 -0000 Subject: [Python-checkins] bpo-40611: Adds MAP_POPULATE to the mmap module (GH-20061) Message-ID: https://github.com/python/cpython/commit/21fda91f8da96406e6a912f7c312424209c19bef commit: 21fda91f8da96406e6a912f7c312424209c19bef branch: master author: Ethan Steinberg committer: GitHub date: 2020-05-26T23:42:18+02:00 summary: bpo-40611: Adds MAP_POPULATE to the mmap module (GH-20061) MAP_POPULATE constant has now been added to the list of exported mmap module flags. 
files: A Misc/NEWS.d/next/Library/2020-05-13-16-28-33.bpo-40611.ZCk0_c.rst M Doc/library/mmap.rst M Modules/mmapmodule.c diff --git a/Doc/library/mmap.rst b/Doc/library/mmap.rst index 1f3fbc340fc26..698c17653786b 100644 --- a/Doc/library/mmap.rst +++ b/Doc/library/mmap.rst @@ -81,7 +81,9 @@ To map anonymous memory, -1 should be passed as the fileno along with the length private copy-on-write mapping, so changes to the contents of the mmap object will be private to this process, and :const:`MAP_SHARED` creates a mapping that's shared with all other processes mapping the same areas of - the file. The default value is :const:`MAP_SHARED`. + the file. The default value is :const:`MAP_SHARED`. Some systems have + additional possible flags with the full list specified in + :ref:`MAP_* constants `. *prot*, if specified, gives the desired memory protection; the two most useful values are :const:`PROT_READ` and :const:`PROT_WRITE`, to specify @@ -342,3 +344,21 @@ MADV_* Constants Availability: Systems with the madvise() system call. .. versionadded:: 3.8 + +.. _map-constants: + +MAP_* Constants ++++++++++++++++ + +.. data:: MAP_SHARED + MAP_PRIVATE + MAP_DENYWRITE + MAP_EXECUTABLE + MAP_ANON + MAP_ANONYMOUS + MAP_POPULATE + + These are the various flags that can be passed to :meth:`mmap.mmap`. Note that some options might not be present on some systems. + + .. versionchanged:: 3.10 + Added MAP_POPULATE constant. diff --git a/Misc/NEWS.d/next/Library/2020-05-13-16-28-33.bpo-40611.ZCk0_c.rst b/Misc/NEWS.d/next/Library/2020-05-13-16-28-33.bpo-40611.ZCk0_c.rst new file mode 100644 index 0000000000000..50ef3ad200a5e --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-13-16-28-33.bpo-40611.ZCk0_c.rst @@ -0,0 +1 @@ +:data:`~mmap.MAP_POPULATE` constant has now been added to the list of exported :mod:`mmap` module flags. 
diff --git a/Modules/mmapmodule.c b/Modules/mmapmodule.c index a3e22d0a5110d..8a60db1e1c469 100644 --- a/Modules/mmapmodule.c +++ b/Modules/mmapmodule.c @@ -1574,6 +1574,9 @@ PyInit_mmap(void) setint(dict, "MAP_ANON", MAP_ANONYMOUS); setint(dict, "MAP_ANONYMOUS", MAP_ANONYMOUS); #endif +#ifdef MAP_POPULATE + setint(dict, "MAP_POPULATE", MAP_POPULATE); +#endif setint(dict, "PAGESIZE", (long)my_getpagesize()); From webhook-mailer at python.org Tue May 26 19:16:01 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Tue, 26 May 2020 23:16:01 -0000 Subject: [Python-checkins] Fix lookahead of soft keywords in the PEG parser (GH-20436) Message-ID: https://github.com/python/cpython/commit/404b23b85b17c84e022779f31fc89cb0ed0d37e8 commit: 404b23b85b17c84e022779f31fc89cb0ed0d37e8 branch: master author: Pablo Galindo committer: GitHub date: 2020-05-26T16:15:52-07:00 summary: Fix lookahead of soft keywords in the PEG parser (GH-20436) Automerge-Triggered-By: @gvanrossum files: M Lib/test/test_peg_generator/test_c_parser.py M Parser/pegen/pegen.c M Parser/pegen/pegen.h M Tools/peg_generator/pegen/c_generator.py diff --git a/Lib/test/test_peg_generator/test_c_parser.py b/Lib/test/test_peg_generator/test_c_parser.py index 72383d5b5a631..a5d88501f77ad 100644 --- a/Lib/test/test_peg_generator/test_c_parser.py +++ b/Lib/test/test_peg_generator/test_c_parser.py @@ -432,3 +432,15 @@ def test_soft_keywords_parse(self) -> None: self.check_input_strings_for_grammar(valid_cases, invalid_cases) """ self.run_test(grammar_source, test_source) + + def test_soft_keywords_lookahead(self) -> None: + grammar_source = """ + start: &"if" "if" expr '+' expr NEWLINE + expr: NAME + """ + test_source = """ + valid_cases = ["if if + if"] + invalid_cases = ["if if"] + self.check_input_strings_for_grammar(valid_cases, invalid_cases) + """ + self.run_test(grammar_source, test_source) diff --git a/Parser/pegen/pegen.c b/Parser/pegen/pegen.c index ee30c2c0688f8..a0285bcb60e95 100644 --- 
a/Parser/pegen/pegen.c +++ b/Parser/pegen/pegen.c @@ -718,6 +718,15 @@ _PyPegen_lookahead_with_name(int positive, expr_ty (func)(Parser *), Parser *p) return (res != NULL) == positive; } +int +_PyPegen_lookahead_with_string(int positive, expr_ty (func)(Parser *, const char*), Parser *p, const char* arg) +{ + int mark = p->mark; + void *res = func(p, arg); + p->mark = mark; + return (res != NULL) == positive; +} + int _PyPegen_lookahead_with_int(int positive, Token *(func)(Parser *, int), Parser *p, int arg) { diff --git a/Parser/pegen/pegen.h b/Parser/pegen/pegen.h index 9507d9955ae32..64cf0ec892913 100644 --- a/Parser/pegen/pegen.h +++ b/Parser/pegen/pegen.h @@ -119,6 +119,7 @@ int _PyPegen_is_memoized(Parser *p, int type, void *pres); int _PyPegen_lookahead_with_name(int, expr_ty (func)(Parser *), Parser *); int _PyPegen_lookahead_with_int(int, Token *(func)(Parser *, int), Parser *, int); +int _PyPegen_lookahead_with_string(int , expr_ty (func)(Parser *, const char*), Parser *, const char*); int _PyPegen_lookahead(int, void *(func)(Parser *), Parser *); Token *_PyPegen_expect_token(Parser *p, int type); diff --git a/Tools/peg_generator/pegen/c_generator.py b/Tools/peg_generator/pegen/c_generator.py index 885ff05858f67..ce1d6bb7bf355 100644 --- a/Tools/peg_generator/pegen/c_generator.py +++ b/Tools/peg_generator/pegen/c_generator.py @@ -58,7 +58,8 @@ class NodeTypes(Enum): STRING_TOKEN = 2 GENERIC_TOKEN = 3 KEYWORD = 4 - CUT_OPERATOR = 5 + SOFT_KEYWORD = 5 + CUT_OPERATOR = 6 BASE_NODETYPES = { @@ -123,7 +124,7 @@ def soft_keyword_helper(self, value: str) -> FunctionCall: function="_PyPegen_expect_soft_keyword", arguments=["p", value], return_type="expr_ty", - nodetype=NodeTypes.NAME_TOKEN, + nodetype=NodeTypes.SOFT_KEYWORD, comment=f"soft_keyword='{value}'", ) @@ -217,6 +218,12 @@ def lookahead_call_helper(self, node: Lookahead, positive: int) -> FunctionCall: arguments=[positive, call.function, *call.arguments], return_type="int", ) + elif call.nodetype == 
NodeTypes.SOFT_KEYWORD: + return FunctionCall( + function=f"_PyPegen_lookahead_with_string", + arguments=[positive, call.function, *call.arguments], + return_type="int", + ) elif call.nodetype in {NodeTypes.GENERIC_TOKEN, NodeTypes.KEYWORD}: return FunctionCall( function=f"_PyPegen_lookahead_with_int", From webhook-mailer at python.org Wed May 27 05:03:47 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Wed, 27 May 2020 09:03:47 -0000 Subject: [Python-checkins] bpo-40217: Ensure Py_VISIT(Py_TYPE(self)) is always called for PyType_FromSpec types (reverts GH-19414) (GH-20264) Message-ID: https://github.com/python/cpython/commit/1cf15af9a6f28750f37b08c028ada31d38e818dd commit: 1cf15af9a6f28750f37b08c028ada31d38e818dd branch: master author: Pablo Galindo committer: GitHub date: 2020-05-27T02:03:38-07:00 summary: bpo-40217: Ensure Py_VISIT(Py_TYPE(self)) is always called for PyType_FromSpec types (reverts GH-19414) (GH-20264) Heap types now always visit the type in tp_traverse. See added docs for details. This reverts commit 0169d3003be3d072751dd14a5c84748ab63a249f. Automerge-Triggered-By: @encukou files: A Misc/NEWS.d/next/Core and Builtins/2020-05-23-01-15-51.bpo-40217.jZsHTc.rst M Doc/c-api/typeobj.rst M Doc/whatsnew/3.9.rst M Modules/_abc.c M Modules/_curses_panel.c M Modules/_json.c M Modules/_struct.c M Modules/xxlimited.c M Objects/structseq.c M Objects/typeobject.c M Parser/asdl_c.py M Python/Python-ast.c diff --git a/Doc/c-api/typeobj.rst b/Doc/c-api/typeobj.rst index ce4e8c926b294..385c7f94c672f 100644 --- a/Doc/c-api/typeobj.rst +++ b/Doc/c-api/typeobj.rst @@ -1223,11 +1223,25 @@ and :c:type:`PyType_Type` effectively act as defaults.) but the instance has no strong reference to the elements inside it, as they are allowed to be removed even if the instance is still alive). - Note that :c:func:`Py_VISIT` requires the *visit* and *arg* parameters to :c:func:`local_traverse` to have these specific names; don't name them just anything. 
+ Heap-allocated types (:const:`Py_TPFLAGS_HEAPTYPE`, such as those created + with :c:func:`PyType_FromSpec` and similar APIs) hold a reference to their + type. Their traversal function must therefore either visit + :c:func:`Py_TYPE(self) `, or delegate this responsibility by + calling ``tp_traverse`` of another heap-allocated type (such as a + heap-allocated superclass). + If they do not, the type object may not be garbage-collected. + + .. versionchanged:: 3.9 + + Heap-allocated types are expected to visit ``Py_TYPE(self)`` in + ``tp_traverse``. In earlier versions of Python, due to + `bug 40217 `_, doing this + may lead to crashes in subclasses. + **Inheritance:** Group: :const:`Py_TPFLAGS_HAVE_GC`, :attr:`tp_traverse`, :attr:`tp_clear` diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index d72fea2c67968..8a04f72513357 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -933,6 +933,55 @@ Changes in the Python API (Contributed by Inada Naoki in :issue:`34538`.) +Changes in the C API +-------------------- + +* Instances of heap-allocated types (such as those created with + :c:func:`PyType_FromSpec` and similar APIs) hold a reference to their type + object since Python 3.8. As indicated in the "Changes in the C API" of Python + 3.8, for the vast majority of cases, there should be no side effect but for + types that have a custom :c:member:`~PyTypeObject.tp_traverse` function, + ensure that all custom ``tp_traverse`` functions of heap-allocated types + visit the object's type. + + Example: + + .. code-block:: c + + int + foo_traverse(foo_struct *self, visitproc visit, void *arg) { + // Rest of the traverse function + #if PY_VERSION_HEX >= 0x03090000 + // This was not needed before Python 3.9 (Python issue 35810 and 40217) + Py_VISIT(Py_TYPE(self)); + #endif + } + + If your traverse function delegates to ``tp_traverse`` of its base class + (or another type), ensure that ``Py_TYPE(self)`` is visited only once. 
+ Note that only heap types are expected to visit the type in ``tp_traverse``. + + For example, if your ``tp_traverse`` function includes: + + .. code-block:: c + + base->tp_traverse(self, visit, arg) + + then add: + + .. code-block:: c + + #if PY_VERSION_HEX >= 0x03090000 + // This was not needed before Python 3.9 (Python issue 35810 and 40217) + if (base->tp_flags & Py_TPFLAGS_HEAPTYPE) { + // a heap type's tp_traverse already visited Py_TYPE(self) + } else { + Py_VISIT(Py_TYPE(self)); + } + #else + + (See :issue:`35810` and :issue:`40217` for more information.) + CPython bytecode changes ------------------------ diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-23-01-15-51.bpo-40217.jZsHTc.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-23-01-15-51.bpo-40217.jZsHTc.rst new file mode 100644 index 0000000000000..b13e8eeb0634f --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-23-01-15-51.bpo-40217.jZsHTc.rst @@ -0,0 +1,4 @@ +Instances of types created with :c:func:`PyType_FromSpecWithBases` will no +longer automatically visit their class object when traversing references in +the garbage collector. The user is expected to manually visit the object's +class. Patch by Pablo Galindo. 
diff --git a/Modules/_abc.c b/Modules/_abc.c index 434bc454175b5..709b52ff96b29 100644 --- a/Modules/_abc.c +++ b/Modules/_abc.c @@ -46,6 +46,7 @@ typedef struct { static int abc_data_traverse(_abc_data *self, visitproc visit, void *arg) { + Py_VISIT(Py_TYPE(self)); Py_VISIT(self->_abc_registry); Py_VISIT(self->_abc_cache); Py_VISIT(self->_abc_negative_cache); diff --git a/Modules/_curses_panel.c b/Modules/_curses_panel.c index 7ca91f641617a..f124803493d88 100644 --- a/Modules/_curses_panel.c +++ b/Modules/_curses_panel.c @@ -39,6 +39,7 @@ _curses_panel_clear(PyObject *m) static int _curses_panel_traverse(PyObject *m, visitproc visit, void *arg) { + Py_VISIT(Py_TYPE(m)); Py_VISIT(get_curses_panelstate(m)->PyCursesError); return 0; } diff --git a/Modules/_json.c b/Modules/_json.c index 075aa3d2f4f6c..faa3944eedd74 100644 --- a/Modules/_json.c +++ b/Modules/_json.c @@ -647,6 +647,7 @@ scanner_dealloc(PyObject *self) static int scanner_traverse(PyScannerObject *self, visitproc visit, void *arg) { + Py_VISIT(Py_TYPE(self)); Py_VISIT(self->object_hook); Py_VISIT(self->object_pairs_hook); Py_VISIT(self->parse_float); @@ -1745,6 +1746,7 @@ encoder_dealloc(PyObject *self) static int encoder_traverse(PyEncoderObject *self, visitproc visit, void *arg) { + Py_VISIT(Py_TYPE(self)); Py_VISIT(self->markers); Py_VISIT(self->defaultfn); Py_VISIT(self->encoder); diff --git a/Modules/_struct.c b/Modules/_struct.c index 5984bb6811436..f759f0b169418 100644 --- a/Modules/_struct.c +++ b/Modules/_struct.c @@ -1646,6 +1646,7 @@ unpackiter_dealloc(unpackiterobject *self) static int unpackiter_traverse(unpackiterobject *self, visitproc visit, void *arg) { + Py_VISIT(Py_TYPE(self)); Py_VISIT(self->so); Py_VISIT(self->buf.obj); return 0; diff --git a/Modules/xxlimited.c b/Modules/xxlimited.c index 7ce0b6ec88051..5b05a9454a05d 100644 --- a/Modules/xxlimited.c +++ b/Modules/xxlimited.c @@ -43,6 +43,7 @@ newXxoObject(PyObject *arg) static int Xxo_traverse(XxoObject *self, visitproc visit, void 
*arg) { + Py_VISIT(Py_TYPE(self)); Py_VISIT(self->x_attr); return 0; } diff --git a/Objects/structseq.c b/Objects/structseq.c index 9bdda87ae0be0..b17b1f99a5bc6 100644 --- a/Objects/structseq.c +++ b/Objects/structseq.c @@ -70,6 +70,9 @@ PyStructSequence_GetItem(PyObject* op, Py_ssize_t i) static int structseq_traverse(PyStructSequence *obj, visitproc visit, void *arg) { + if (Py_TYPE(obj)->tp_flags & Py_TPFLAGS_HEAPTYPE) { + Py_VISIT(Py_TYPE(obj)); + } Py_ssize_t i, size; size = REAL_SIZE(obj); for (i = 0; i < size; ++i) { diff --git a/Objects/typeobject.c b/Objects/typeobject.c index 0e055d677f139..ba2a852cdda4f 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -1039,42 +1039,6 @@ type_call(PyTypeObject *type, PyObject *args, PyObject *kwds) return obj; } -PyObject * -PyType_FromSpec_Alloc(PyTypeObject *type, Py_ssize_t nitems) -{ - PyObject *obj; - const size_t size = _Py_SIZE_ROUND_UP( - _PyObject_VAR_SIZE(type, nitems+1) + sizeof(traverseproc), - SIZEOF_VOID_P); - /* note that we need to add one, for the sentinel and space for the - provided tp-traverse: See bpo-40217 for more details */ - - if (PyType_IS_GC(type)) { - obj = _PyObject_GC_Malloc(size); - } - else { - obj = (PyObject *)PyObject_MALLOC(size); - } - - if (obj == NULL) { - return PyErr_NoMemory(); - } - - memset(obj, '\0', size); - - if (type->tp_itemsize == 0) { - (void)PyObject_INIT(obj, type); - } - else { - (void) PyObject_INIT_VAR((PyVarObject *)obj, type, nitems); - } - - if (PyType_IS_GC(type)) { - _PyObject_GC_TRACK(obj); - } - return obj; -} - PyObject * PyType_GenericAlloc(PyTypeObject *type, Py_ssize_t nitems) { @@ -1164,11 +1128,16 @@ subtype_traverse(PyObject *self, visitproc visit, void *arg) Py_VISIT(*dictptr); } - if (type->tp_flags & Py_TPFLAGS_HEAPTYPE) + if (type->tp_flags & Py_TPFLAGS_HEAPTYPE + && (!basetraverse || !(base->tp_flags & Py_TPFLAGS_HEAPTYPE))) { /* For a heaptype, the instances count as references to the type. 
Traverse the type so the collector - can find cycles involving this link. */ + can find cycles involving this link. + Skip this visit if basetraverse belongs to a heap type: in that + case, basetraverse will visit the type when we call it later. + */ Py_VISIT(type); + } if (basetraverse) return basetraverse(self, visit, arg); @@ -2910,36 +2879,6 @@ static const short slotoffsets[] = { #include "typeslots.inc" }; -static int -PyType_FromSpec_tp_traverse(PyObject *self, visitproc visit, void *arg) -{ - PyTypeObject *parent = Py_TYPE(self); - - // Only a instance of a type that is directly created by - // PyType_FromSpec (not subclasses) must visit its parent. - if (parent->tp_traverse == PyType_FromSpec_tp_traverse) { - Py_VISIT(parent); - } - - // Search for the original type that was created using PyType_FromSpec - PyTypeObject *base; - base = parent; - while (base->tp_traverse != PyType_FromSpec_tp_traverse) { - base = base->tp_base; - assert(base); - } - - // Extract the user defined traverse function that we placed at the end - // of the type and call it. 
- size_t size = Py_SIZE(base); - size_t _offset = _PyObject_VAR_SIZE(&PyType_Type, size+1); - traverseproc fun = *(traverseproc*)((char*)base + _offset); - if (fun == NULL) { - return 0; - } - return fun(self, visit, arg); -} - PyObject * PyType_FromSpecWithBases(PyType_Spec *spec, PyObject *bases) { @@ -2985,7 +2924,7 @@ PyType_FromModuleAndSpec(PyObject *module, PyType_Spec *spec, PyObject *bases) } } - res = (PyHeapTypeObject*)PyType_FromSpec_Alloc(&PyType_Type, nmembers); + res = (PyHeapTypeObject*)PyType_GenericAlloc(&PyType_Type, nmembers); if (res == NULL) return NULL; res_start = (char*)res; @@ -3093,30 +3032,6 @@ PyType_FromModuleAndSpec(PyObject *module, PyType_Spec *spec, PyObject *bases) memcpy(PyHeapType_GET_MEMBERS(res), slot->pfunc, len); type->tp_members = PyHeapType_GET_MEMBERS(res); } - else if (slot->slot == Py_tp_traverse) { - - /* Types created by PyType_FromSpec own a strong reference to their - * type, but this was added in Python 3.8. The tp_traverse function - * needs to call Py_VISIT on the type but all existing traverse - * functions cannot be updated (especially the ones from existing user - * functions) so we need to provide a tp_traverse that manually calls - * Py_VISIT(Py_TYPE(self)) and then call the provided tp_traverse. In - * this way, user functions do not need to be updated, preserve - * backwards compatibility. - * - * We store the user-provided traverse function at the end of the type - * (we have allocated space for it) so we can call it from our - * PyType_FromSpec_tp_traverse wrapper. - * - * Check bpo-40217 for more information and rationale about this issue. 
- * - * */ - - type->tp_traverse = PyType_FromSpec_tp_traverse; - size_t _offset = _PyObject_VAR_SIZE(&PyType_Type, nmembers+1); - traverseproc *user_traverse = (traverseproc*)((char*)type + _offset); - *user_traverse = slot->pfunc; - } else { /* Copy other slots directly */ *(void**)(res_start + slotoffsets[slot->slot]) = slot->pfunc; diff --git a/Parser/asdl_c.py b/Parser/asdl_c.py index f8729cd170b10..ce9724aee3ed8 100755 --- a/Parser/asdl_c.py +++ b/Parser/asdl_c.py @@ -673,6 +673,7 @@ def visitModule(self, mod): static int ast_traverse(AST_object *self, visitproc visit, void *arg) { + Py_VISIT(Py_TYPE(self)); Py_VISIT(self->dict); return 0; } diff --git a/Python/Python-ast.c b/Python/Python-ast.c index d2edf74c81216..694987dd07788 100644 --- a/Python/Python-ast.c +++ b/Python/Python-ast.c @@ -1109,6 +1109,7 @@ ast_dealloc(AST_object *self) static int ast_traverse(AST_object *self, visitproc visit, void *arg) { + Py_VISIT(Py_TYPE(self)); Py_VISIT(self->dict); return 0; } From webhook-mailer at python.org Wed May 27 06:28:49 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Wed, 27 May 2020 10:28:49 -0000 Subject: [Python-checkins] Fix the link to ncurses patch download in macos installer build script (GH-20421) Message-ID: https://github.com/python/cpython/commit/7c82528d40acae2edf0eb73632e65b044792eeac commit: 7c82528d40acae2edf0eb73632e65b044792eeac branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-27T03:28:45-07:00 summary: Fix the link to ncurses patch download in macos installer build script (GH-20421) Reason: the link `ftp://invisible-island.net/ncurses//5.9/ncurses-5.9-20120616-patch.sh.bz2` is dead, which prevents `Mac/BuildScript/build-installer.py` from completing. Looks like the host of the FTP server was changed to `ftp.invisible-island.net`, thus this proposal. 
Signed-off-by: oleg.hoefling (cherry picked from commit 7da46b676aed7111de34b57c8b942a7f3bb80327) Co-authored-by: Oleg Höfling files: M Mac/BuildScript/build-installer.py diff --git a/Mac/BuildScript/build-installer.py b/Mac/BuildScript/build-installer.py index 0ad7298e98242..bdfa6f4adf612 100755 --- a/Mac/BuildScript/build-installer.py +++ b/Mac/BuildScript/build-installer.py @@ -302,7 +302,7 @@ def library_recipes(): "--libdir=/Library/Frameworks/Python.framework/Versions/%s/lib"%(getVersion(),), ], patchscripts=[ - ("ftp://invisible-island.net/ncurses//5.9/ncurses-5.9-20120616-patch.sh.bz2", + ("ftp://ftp.invisible-island.net/ncurses//5.9/ncurses-5.9-20120616-patch.sh.bz2", "f54bf02a349f96a7c4f0d00922f3a0d4"), ], useLDFlags=False, From webhook-mailer at python.org Wed May 27 06:29:29 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Wed, 27 May 2020 10:29:29 -0000 Subject: [Python-checkins] Fix the link to ncurses patch download in macos installer build script (GH-20421) Message-ID: https://github.com/python/cpython/commit/c8e107607d8a2c9a149cf6a706d508a556729892 commit: c8e107607d8a2c9a149cf6a706d508a556729892 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-27T03:29:25-07:00 summary: Fix the link to ncurses patch download in macos installer build script (GH-20421) Reason: the link `ftp://invisible-island.net/ncurses//5.9/ncurses-5.9-20120616-patch.sh.bz2` is dead, which prevents `Mac/BuildScript/build-installer.py` from completing. Looks like the host of the FTP server was changed to `ftp.invisible-island.net`, thus this proposal.
Signed-off-by: oleg.hoefling (cherry picked from commit 7da46b676aed7111de34b57c8b942a7f3bb80327) Co-authored-by: Oleg Höfling files: M Mac/BuildScript/build-installer.py diff --git a/Mac/BuildScript/build-installer.py b/Mac/BuildScript/build-installer.py index d3a0c182ccae7..2b48cdfb860f7 100755 --- a/Mac/BuildScript/build-installer.py +++ b/Mac/BuildScript/build-installer.py @@ -302,7 +302,7 @@ def library_recipes(): "--libdir=/Library/Frameworks/Python.framework/Versions/%s/lib"%(getVersion(),), ], patchscripts=[ - ("ftp://invisible-island.net/ncurses//5.9/ncurses-5.9-20120616-patch.sh.bz2", + ("ftp://ftp.invisible-island.net/ncurses//5.9/ncurses-5.9-20120616-patch.sh.bz2", "f54bf02a349f96a7c4f0d00922f3a0d4"), ], useLDFlags=False, From webhook-mailer at python.org Wed May 27 06:49:39 2020 From: webhook-mailer at python.org (Xavier Fernandez) Date: Wed, 27 May 2020 10:49:39 -0000 Subject: [Python-checkins] Upgrade bundled versions of pip & setuptools (#16782) Message-ID: https://github.com/python/cpython/commit/feb0846c3a28b05b4cfbc6ab34c764957f3eff55 commit: feb0846c3a28b05b4cfbc6ab34c764957f3eff55 branch: master author: Xavier Fernandez committer: GitHub date: 2020-05-27T20:49:34+10:00 summary: Upgrade bundled versions of pip & setuptools (#16782) files: A Lib/ensurepip/_bundled/pip-20.1-py2.py3-none-any.whl A Lib/ensurepip/_bundled/setuptools-46.1.3-py3-none-any.whl A Misc/NEWS.d/next/Library/2019-10-15-23-28-11.bpo-38488.hFQNgA.rst D Lib/ensurepip/_bundled/pip-19.2.3-py2.py3-none-any.whl D Lib/ensurepip/_bundled/setuptools-41.2.0-py2.py3-none-any.whl M Lib/ensurepip/__init__.py diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py index 545fce656fd6f..1ec49714c6459 100644 --- a/Lib/ensurepip/__init__.py +++ b/Lib/ensurepip/__init__.py @@ -12,13 +12,13 @@ __all__ = ["version", "bootstrap"] -_SETUPTOOLS_VERSION = "41.2.0" +_SETUPTOOLS_VERSION = "46.1.3" -_PIP_VERSION = "19.2.3" +_PIP_VERSION = "20.1" _PROJECTS = [ - ("setuptools", 
_SETUPTOOLS_VERSION), - ("pip", _PIP_VERSION), + ("setuptools", _SETUPTOOLS_VERSION, 'py3'), + ("pip", _PIP_VERSION, 'py2.py3'), ] @@ -107,8 +107,8 @@ def _bootstrap(*, root=None, upgrade=False, user=False, # Put our bundled wheels into a temporary directory and construct the # additional paths that need added to sys.path additional_paths = [] - for project, version in _PROJECTS: - wheel_name = "{}-{}-py2.py3-none-any.whl".format(project, version) + for project, version, py_tag in _PROJECTS: + wheel_name = "{}-{}-{}-none-any.whl".format(project, version, py_tag) whl = resources.read_binary( _bundled, wheel_name, diff --git a/Lib/ensurepip/_bundled/pip-19.2.3-py2.py3-none-any.whl b/Lib/ensurepip/_bundled/pip-19.2.3-py2.py3-none-any.whl deleted file mode 100644 index 8118df8ac1940..0000000000000 Binary files a/Lib/ensurepip/_bundled/pip-19.2.3-py2.py3-none-any.whl and /dev/null differ diff --git a/Lib/ensurepip/_bundled/pip-20.1-py2.py3-none-any.whl b/Lib/ensurepip/_bundled/pip-20.1-py2.py3-none-any.whl new file mode 100644 index 0000000000000..925a59f4c2d8a Binary files /dev/null and b/Lib/ensurepip/_bundled/pip-20.1-py2.py3-none-any.whl differ diff --git a/Lib/ensurepip/_bundled/setuptools-41.2.0-py2.py3-none-any.whl b/Lib/ensurepip/_bundled/setuptools-46.1.3-py3-none-any.whl similarity index 65% rename from Lib/ensurepip/_bundled/setuptools-41.2.0-py2.py3-none-any.whl rename to Lib/ensurepip/_bundled/setuptools-46.1.3-py3-none-any.whl index 82df6f63f4ee9..fc3f6ccf4277d 100644 Binary files a/Lib/ensurepip/_bundled/setuptools-41.2.0-py2.py3-none-any.whl and b/Lib/ensurepip/_bundled/setuptools-46.1.3-py3-none-any.whl differ diff --git a/Misc/NEWS.d/next/Library/2019-10-15-23-28-11.bpo-38488.hFQNgA.rst b/Misc/NEWS.d/next/Library/2019-10-15-23-28-11.bpo-38488.hFQNgA.rst new file mode 100644 index 0000000000000..95cf2f1b5ed46 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-10-15-23-28-11.bpo-38488.hFQNgA.rst @@ -0,0 +1 @@ +Update ensurepip to install pip 20.1 and 
setuptools 46.1.3. From webhook-mailer at python.org Wed May 27 08:43:21 2020 From: webhook-mailer at python.org (Mark Dickinson) Date: Wed, 27 May 2020 12:43:21 -0000 Subject: [Python-checkins] bpo-37999: Fix outdated __int__ and nb_int references in comments (GH-20449) Message-ID: https://github.com/python/cpython/commit/20941de0ddc39ce9f07e29b4cc770e8a9ef14d41 commit: 20941de0ddc39ce9f07e29b4cc770e8a9ef14d41 branch: master author: Mark Dickinson committer: GitHub date: 2020-05-27T13:43:17+01:00 summary: bpo-37999: Fix outdated __int__ and nb_int references in comments (GH-20449) * Fix outdated __int__ and nb_int references in comments * Also update C-API documentation * Add back missing 'method' word * Remove .. deprecated notices files: M Doc/c-api/long.rst M Objects/longobject.c diff --git a/Doc/c-api/long.rst b/Doc/c-api/long.rst index c5c2aa60dcc35..a7bd43df90689 100644 --- a/Doc/c-api/long.rst +++ b/Doc/c-api/long.rst @@ -129,9 +129,8 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate. single: OverflowError (built-in exception) Return a C :c:type:`long` representation of *obj*. If *obj* is not an - instance of :c:type:`PyLongObject`, first call its :meth:`__index__` or - :meth:`__int__` method (if present) to convert it to a - :c:type:`PyLongObject`. + instance of :c:type:`PyLongObject`, first call its :meth:`__index__` method + (if present) to convert it to a :c:type:`PyLongObject`. Raise :exc:`OverflowError` if the value of *obj* is out of range for a :c:type:`long`. @@ -141,16 +140,15 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate. .. versionchanged:: 3.8 Use :meth:`__index__` if available. - .. deprecated:: 3.8 - Using :meth:`__int__` is deprecated. + .. versionchanged:: 3.10 + This function will no longer use :meth:`__int__`. .. c:function:: long PyLong_AsLongAndOverflow(PyObject *obj, int *overflow) Return a C :c:type:`long` representation of *obj*. 
If *obj* is not an - instance of :c:type:`PyLongObject`, first call its :meth:`__index__` or - :meth:`__int__` method (if present) to convert it to a - :c:type:`PyLongObject`. + instance of :c:type:`PyLongObject`, first call its :meth:`__index__` + method (if present) to convert it to a :c:type:`PyLongObject`. If the value of *obj* is greater than :const:`LONG_MAX` or less than :const:`LONG_MIN`, set *\*overflow* to ``1`` or ``-1``, respectively, and @@ -162,8 +160,8 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate. .. versionchanged:: 3.8 Use :meth:`__index__` if available. - .. deprecated:: 3.8 - Using :meth:`__int__` is deprecated. + .. versionchanged:: 3.10 + This function will no longer use :meth:`__int__`. .. c:function:: long long PyLong_AsLongLong(PyObject *obj) @@ -172,9 +170,8 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate. single: OverflowError (built-in exception) Return a C :c:type:`long long` representation of *obj*. If *obj* is not an - instance of :c:type:`PyLongObject`, first call its :meth:`__index__` or - :meth:`__int__` method (if present) to convert it to a - :c:type:`PyLongObject`. + instance of :c:type:`PyLongObject`, first call its :meth:`__index__` method + (if present) to convert it to a :c:type:`PyLongObject`. Raise :exc:`OverflowError` if the value of *obj* is out of range for a :c:type:`long long`. @@ -184,16 +181,15 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate. .. versionchanged:: 3.8 Use :meth:`__index__` if available. - .. deprecated:: 3.8 - Using :meth:`__int__` is deprecated. + .. versionchanged:: 3.10 + This function will no longer use :meth:`__int__`. .. c:function:: long long PyLong_AsLongLongAndOverflow(PyObject *obj, int *overflow) Return a C :c:type:`long long` representation of *obj*. 
If *obj* is not an - instance of :c:type:`PyLongObject`, first call its :meth:`__index__` or - :meth:`__int__` method (if present) to convert it to a - :c:type:`PyLongObject`. + instance of :c:type:`PyLongObject`, first call its :meth:`__index__` method + (if present) to convert it to a :c:type:`PyLongObject`. If the value of *obj* is greater than :const:`LLONG_MAX` or less than :const:`LLONG_MIN`, set *\*overflow* to ``1`` or ``-1``, respectively, @@ -207,8 +203,8 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate. .. versionchanged:: 3.8 Use :meth:`__index__` if available. - .. deprecated:: 3.8 - Using :meth:`__int__` is deprecated. + .. versionchanged:: 3.10 + This function will no longer use :meth:`__int__`. .. c:function:: Py_ssize_t PyLong_AsSsize_t(PyObject *pylong) @@ -278,10 +274,9 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate. .. c:function:: unsigned long PyLong_AsUnsignedLongMask(PyObject *obj) - Return a C :c:type:`unsigned long` representation of *obj*. If *obj* - is not an instance of :c:type:`PyLongObject`, first call its - :meth:`__index__` or :meth:`__int__` method (if present) to convert - it to a :c:type:`PyLongObject`. + Return a C :c:type:`unsigned long` representation of *obj*. If *obj* is not + an instance of :c:type:`PyLongObject`, first call its :meth:`__index__` + method (if present) to convert it to a :c:type:`PyLongObject`. If the value of *obj* is out of range for an :c:type:`unsigned long`, return the reduction of that value modulo ``ULONG_MAX + 1``. @@ -292,16 +287,16 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate. .. versionchanged:: 3.8 Use :meth:`__index__` if available. - .. deprecated:: 3.8 - Using :meth:`__int__` is deprecated. + .. versionchanged:: 3.10 + This function will no longer use :meth:`__int__`. .. 
c:function:: unsigned long long PyLong_AsUnsignedLongLongMask(PyObject *obj) Return a C :c:type:`unsigned long long` representation of *obj*. If *obj* is not an instance of :c:type:`PyLongObject`, first call its - :meth:`__index__` or :meth:`__int__` method (if present) to convert - it to a :c:type:`PyLongObject`. + :meth:`__index__` method (if present) to convert it to a + :c:type:`PyLongObject`. If the value of *obj* is out of range for an :c:type:`unsigned long long`, return the reduction of that value modulo ``ULLONG_MAX + 1``. @@ -312,8 +307,8 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate. .. versionchanged:: 3.8 Use :meth:`__index__` if available. - .. deprecated:: 3.8 - Using :meth:`__int__` is deprecated. + .. versionchanged:: 3.10 + This function will no longer use :meth:`__int__`. .. c:function:: double PyLong_AsDouble(PyObject *pylong) diff --git a/Objects/longobject.c b/Objects/longobject.c index a409948e4aca4..e040d6c87b923 100644 --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -363,7 +363,7 @@ PyLong_FromDouble(double dval) #define PY_ABS_LONG_MIN (0-(unsigned long)LONG_MIN) #define PY_ABS_SSIZE_T_MIN (0-(size_t)PY_SSIZE_T_MIN) -/* Get a C long int from an int object or any object that has an __int__ +/* Get a C long int from an int object or any object that has an __index__ method. On overflow, return -1 and set *overflow to 1 or -1 depending on the sign of @@ -382,7 +382,7 @@ PyLong_AsLongAndOverflow(PyObject *vv, int *overflow) long res; Py_ssize_t i; int sign; - int do_decref = 0; /* if nb_int was called */ + int do_decref = 0; /* if PyNumber_Index was called */ *overflow = 0; if (vv == NULL) { @@ -449,7 +449,7 @@ PyLong_AsLongAndOverflow(PyObject *vv, int *overflow) return res; } -/* Get a C long int from an int object or any object that has an __int__ +/* Get a C long int from an int object or any object that has an __index__ method. Return -1 and set an error if overflow occurs. 
*/ long @@ -466,7 +466,7 @@ PyLong_AsLong(PyObject *obj) return result; } -/* Get a C int from an int object or any object that has an __int__ +/* Get a C int from an int object or any object that has an __index__ method. Return -1 and set an error if overflow occurs. */ int @@ -1113,7 +1113,7 @@ PyLong_FromSsize_t(Py_ssize_t ival) } /* Get a C long long int from an int object or any object that has an - __int__ method. Return -1 and set an error if overflow occurs. */ + __index__ method. Return -1 and set an error if overflow occurs. */ long long PyLong_AsLongLong(PyObject *vv) @@ -1121,7 +1121,7 @@ PyLong_AsLongLong(PyObject *vv) PyLongObject *v; long long bytes; int res; - int do_decref = 0; /* if nb_int was called */ + int do_decref = 0; /* if PyNumber_Index was called */ if (vv == NULL) { PyErr_BadInternalCall(); @@ -1257,7 +1257,7 @@ PyLong_AsUnsignedLongLongMask(PyObject *op) } /* Get a C long long int from an int object or any object that has an - __int__ method. + __index__ method. On overflow, return -1 and set *overflow to 1 or -1 depending on the sign of the result. Otherwise *overflow is 0. @@ -1275,7 +1275,7 @@ PyLong_AsLongLongAndOverflow(PyObject *vv, int *overflow) long long res; Py_ssize_t i; int sign; - int do_decref = 0; /* if nb_int was called */ + int do_decref = 0; /* if PyNumber_Index was called */ *overflow = 0; if (vv == NULL) { From webhook-mailer at python.org Wed May 27 08:55:16 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 27 May 2020 12:55:16 -0000 Subject: [Python-checkins] bpo-39573: Convert Py_REFCNT and Py_SIZE to functions (GH-20429) Message-ID: https://github.com/python/cpython/commit/fe2978b3b940fe2478335e3a2ca5ad22338cdf9c commit: fe2978b3b940fe2478335e3a2ca5ad22338cdf9c branch: master author: Victor Stinner committer: GitHub date: 2020-05-27T14:55:10+02:00 summary: bpo-39573: Convert Py_REFCNT and Py_SIZE to functions (GH-20429) Convert Py_REFCNT() and Py_SIZE() macros to static inline functions. 
They cannot be used as l-value anymore: use Py_SET_REFCNT() and Py_SET_SIZE() to set an object reference count and size. Replace &Py_SIZE(self) with &((PyVarObject*)self)->ob_size in arraymodule.c. This change is backward incompatible on purpose, to prepare the C API for an opaque PyObject structure. files: A Misc/NEWS.d/next/C API/2020-05-26-16-21-47.bpo-39573.depAgq.rst M Doc/c-api/structures.rst M Doc/whatsnew/3.10.rst M Include/object.h M Modules/arraymodule.c M Objects/tupleobject.c diff --git a/Doc/c-api/structures.rst b/Doc/c-api/structures.rst index 5535f42ac120a..b2392fa5e19c5 100644 --- a/Doc/c-api/structures.rst +++ b/Doc/c-api/structures.rst @@ -88,13 +88,13 @@ the definition of all other Python objects. .. versionadded:: 3.9 -.. c:macro:: Py_REFCNT(o) +.. c:function:: Py_ssize_t Py_REFCNT(const PyObject *o) - This macro is used to access the :attr:`ob_refcnt` member of a Python - object. - It expands to:: + Get the reference count of the Python object *o*. - (((PyObject*)(o))->ob_refcnt) + .. versionchanged:: 3.10 + :c:func:`Py_REFCNT()` is changed to the inline static function. + Use :c:func:`Py_SET_REFCNT()` to set an object reference count. .. c:function:: void Py_SET_REFCNT(PyObject *o, Py_ssize_t refcnt) @@ -104,12 +104,13 @@ the definition of all other Python objects. .. versionadded:: 3.9 -.. c:macro:: Py_SIZE(o) +.. c:function:: Py_ssize_t Py_SIZE(const PyVarObject *o) - This macro is used to access the :attr:`ob_size` member of a Python object. - It expands to:: + Get the size of the Python object *o*. - (((PyVarObject*)(o))->ob_size) + .. versionchanged:: 3.10 + :c:func:`Py_SIZE()` is changed to the inline static function. + Use :c:func:`Py_SET_SIZE()` to set an object size. .. 
c:function:: void Py_SET_SIZE(PyVarObject *o, Py_ssize_t size) diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst index fabd9a2463e27..9edef1ed87ba1 100644 --- a/Doc/whatsnew/3.10.rst +++ b/Doc/whatsnew/3.10.rst @@ -138,6 +138,15 @@ Porting to Python 3.10 see :c:func:`Py_SET_TYPE()` (available since Python 3.9). (Contributed by Dong-hee Na in :issue:`39573`.) +* Since :c:func:`Py_REFCNT()` is changed to the inline static function, + ``Py_REFCNT(obj) = new_refcnt`` must be replaced with ``Py_SET_REFCNT(obj, new_refcnt)``: + see :c:func:`Py_SET_REFCNT()` (available since Python 3.9). + (Contributed by Victor Stinner in :issue:`39573`.) + +* Since :c:func:`Py_SIZE()` is changed to the inline static function, + ``Py_SIZE(obj) = new_size`` must be replaced with ``Py_SET_SIZE(obj, new_size)``: + see :c:func:`Py_SET_SIZE()` (available since Python 3.9). + (Contributed by Victor Stinner in :issue:`39573`.) Removed ------- diff --git a/Include/object.h b/Include/object.h index 5ad05699bb45c..537567040f987 100644 --- a/Include/object.h +++ b/Include/object.h @@ -119,30 +119,45 @@ typedef struct { /* Cast argument to PyVarObject* type. 
*/ #define _PyVarObject_CAST(op) ((PyVarObject*)(op)) +#define _PyVarObject_CAST_CONST(op) ((const PyVarObject*)(op)) + + +static inline Py_ssize_t _Py_REFCNT(const PyObject *ob) { + return ob->ob_refcnt; +} +#define Py_REFCNT(ob) _Py_REFCNT(_PyObject_CAST_CONST(ob)) + + +static inline Py_ssize_t _Py_SIZE(const PyVarObject *ob) { + return ob->ob_size; +} +#define Py_SIZE(ob) _Py_SIZE(_PyVarObject_CAST_CONST(ob)) -#define Py_REFCNT(ob) (_PyObject_CAST(ob)->ob_refcnt) -#define Py_SIZE(ob) (_PyVarObject_CAST(ob)->ob_size) static inline PyTypeObject* _Py_TYPE(const PyObject *ob) { return ob->ob_type; } #define Py_TYPE(ob) _Py_TYPE(_PyObject_CAST_CONST(ob)) + static inline int _Py_IS_TYPE(const PyObject *ob, const PyTypeObject *type) { return ob->ob_type == type; } #define Py_IS_TYPE(ob, type) _Py_IS_TYPE(_PyObject_CAST_CONST(ob), type) + static inline void _Py_SET_REFCNT(PyObject *ob, Py_ssize_t refcnt) { ob->ob_refcnt = refcnt; } #define Py_SET_REFCNT(ob, refcnt) _Py_SET_REFCNT(_PyObject_CAST(ob), refcnt) + static inline void _Py_SET_TYPE(PyObject *ob, PyTypeObject *type) { ob->ob_type = type; } #define Py_SET_TYPE(ob, type) _Py_SET_TYPE(_PyObject_CAST(ob), type) + static inline void _Py_SET_SIZE(PyVarObject *ob, Py_ssize_t size) { ob->ob_size = size; } diff --git a/Misc/NEWS.d/next/C API/2020-05-26-16-21-47.bpo-39573.depAgq.rst b/Misc/NEWS.d/next/C API/2020-05-26-16-21-47.bpo-39573.depAgq.rst new file mode 100644 index 0000000000000..f8f675cebcac7 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2020-05-26-16-21-47.bpo-39573.depAgq.rst @@ -0,0 +1,5 @@ +Convert :c:func:`Py_REFCNT` and :c:func:`Py_SIZE` macros to static inline +functions. They cannot be used as l-value anymore: use +:c:func:`Py_SET_REFCNT` and :c:func:`Py_SET_SIZE` to set an object reference +count and size. This change is backward incompatible on purpose, to prepare +the C API for an opaque :c:type:`PyObject` structure. 
diff --git a/Modules/arraymodule.c b/Modules/arraymodule.c index fb1b82cd6a6ec..4c3ddc3ac2457 100644 --- a/Modules/arraymodule.c +++ b/Modules/arraymodule.c @@ -2525,14 +2525,14 @@ array_buffer_getbuf(arrayobject *self, Py_buffer *view, int flags) Py_INCREF(self); if (view->buf == NULL) view->buf = (void *)emptybuf; - view->len = (Py_SIZE(self)) * self->ob_descr->itemsize; + view->len = Py_SIZE(self) * self->ob_descr->itemsize; view->readonly = 0; view->ndim = 1; view->itemsize = self->ob_descr->itemsize; view->suboffsets = NULL; view->shape = NULL; if ((flags & PyBUF_ND)==PyBUF_ND) { - view->shape = &((Py_SIZE(self))); + view->shape = &((PyVarObject*)self)->ob_size; } view->strides = NULL; if ((flags & PyBUF_STRIDES)==PyBUF_STRIDES) diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c index e4c0c91cfe819..43706c22b9291 100644 --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -82,7 +82,7 @@ tuple_alloc(Py_ssize_t size) numfree[size]--; /* Inline PyObject_InitVar */ #ifdef Py_TRACE_REFS - Py_SIZE(op) = size; + Py_SET_SIZE(op, size); Py_SET_TYPE(op, &PyTuple_Type); #endif _Py_NewReference((PyObject *)op); From webhook-mailer at python.org Wed May 27 09:05:08 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Wed, 27 May 2020 13:05:08 -0000 Subject: [Python-checkins] Clean up compatibility code in importlib fixtures (GH-19156) (#19543) Message-ID: https://github.com/python/cpython/commit/5594c07d97cc56ec7fabc66c6a5c644d3b809612 commit: 5594c07d97cc56ec7fabc66c6a5c644d3b809612 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-27T09:05:03-04:00 summary: Clean up compatibility code in importlib fixtures (GH-19156) (#19543) (cherry picked from commit 574547a75c79b506261520c5773ae08a1dcea1b9) Co-authored-by: Jason R. Coombs Co-authored-by: Jason R. 
Coombs files: M Lib/test/test_importlib/fixtures.py diff --git a/Lib/test/test_importlib/fixtures.py b/Lib/test/test_importlib/fixtures.py index 695c92a786cb0..d923cec26ea8f 100644 --- a/Lib/test/test_importlib/fixtures.py +++ b/Lib/test/test_importlib/fixtures.py @@ -1,25 +1,11 @@ -from __future__ import unicode_literals - import os import sys import shutil +import pathlib import tempfile import textwrap import contextlib -try: - from contextlib import ExitStack -except ImportError: - from contextlib2 import ExitStack - -try: - import pathlib -except ImportError: - import pathlib2 as pathlib - - -__metaclass__ = type - @contextlib.contextmanager def tempdir(): @@ -58,7 +44,7 @@ def install_finder(finder): class Fixtures: def setUp(self): - self.fixtures = ExitStack() + self.fixtures = contextlib.ExitStack() self.addCleanup(self.fixtures.close) From webhook-mailer at python.org Wed May 27 09:37:47 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Wed, 27 May 2020 13:37:47 -0000 Subject: [Python-checkins] bpo-39073: validate Address parts to disallow CRLF (GH-19007) Message-ID: https://github.com/python/cpython/commit/75635c6095bcfbb9fccc239115d3d03ae20a307f commit: 75635c6095bcfbb9fccc239115d3d03ae20a307f branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-27T06:37:40-07:00 summary: bpo-39073: validate Address parts to disallow CRLF (GH-19007) Disallow CR or LF in email.headerregistry.Address arguments to guard against header injection attacks. 
(cherry picked from commit 614f17211c5fc0e5b828be1d3320661d1038fe8f) Co-authored-by: Ashwin Ramaswami files: A Misc/NEWS.d/next/Security/2020-03-15-01-28-36.bpo-39073.6Szd3i.rst M Lib/email/headerregistry.py M Lib/test/test_email/test_headerregistry.py diff --git a/Lib/email/headerregistry.py b/Lib/email/headerregistry.py index 8d1a2025271ff..d0914fd1925c6 100644 --- a/Lib/email/headerregistry.py +++ b/Lib/email/headerregistry.py @@ -31,6 +31,11 @@ def __init__(self, display_name='', username='', domain='', addr_spec=None): without any Content Transfer Encoding. """ + + inputs = ''.join(filter(None, (display_name, username, domain, addr_spec))) + if '\r' in inputs or '\n' in inputs: + raise ValueError("invalid arguments; address parts cannot contain CR or LF") + # This clause with its potential 'raise' may only happen when an # application program creates an Address object using an addr_spec # keyword. The email library code itself must always supply username diff --git a/Lib/test/test_email/test_headerregistry.py b/Lib/test/test_email/test_headerregistry.py index 8d89c5dd58322..a43d51f730ad5 100644 --- a/Lib/test/test_email/test_headerregistry.py +++ b/Lib/test/test_email/test_headerregistry.py @@ -1436,6 +1436,25 @@ def test_il8n(self): # with self.assertRaises(ValueError): # Address('foo', 'w?k', 'example.com') + def test_crlf_in_constructor_args_raises(self): + cases = ( + dict(display_name='foo\r'), + dict(display_name='foo\n'), + dict(display_name='foo\r\n'), + dict(domain='example.com\r'), + dict(domain='example.com\n'), + dict(domain='example.com\r\n'), + dict(username='wok\r'), + dict(username='wok\n'), + dict(username='wok\r\n'), + dict(addr_spec='wok at example.com\r'), + dict(addr_spec='wok at example.com\n'), + dict(addr_spec='wok at example.com\r\n') + ) + for kwargs in cases: + with self.subTest(kwargs=kwargs), self.assertRaisesRegex(ValueError, "invalid arguments"): + Address(**kwargs) + def test_non_ascii_username_in_addr_spec_raises(self): with 
self.assertRaises(ValueError): Address('foo', addr_spec='w?k at example.com') diff --git a/Misc/NEWS.d/next/Security/2020-03-15-01-28-36.bpo-39073.6Szd3i.rst b/Misc/NEWS.d/next/Security/2020-03-15-01-28-36.bpo-39073.6Szd3i.rst new file mode 100644 index 0000000000000..6c9447b897bf6 --- /dev/null +++ b/Misc/NEWS.d/next/Security/2020-03-15-01-28-36.bpo-39073.6Szd3i.rst @@ -0,0 +1 @@ +Disallow CR or LF in email.headerregistry.Address arguments to guard against header injection attacks. From webhook-mailer at python.org Wed May 27 09:38:19 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Wed, 27 May 2020 13:38:19 -0000 Subject: [Python-checkins] bpo-39073: validate Address parts to disallow CRLF (GH-19007) Message-ID: https://github.com/python/cpython/commit/a93bf82980d7c02217a088bafa193f32a4d13abb commit: a93bf82980d7c02217a088bafa193f32a4d13abb branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-27T06:38:14-07:00 summary: bpo-39073: validate Address parts to disallow CRLF (GH-19007) Disallow CR or LF in email.headerregistry.Address arguments to guard against header injection attacks. (cherry picked from commit 614f17211c5fc0e5b828be1d3320661d1038fe8f) Co-authored-by: Ashwin Ramaswami files: A Misc/NEWS.d/next/Security/2020-03-15-01-28-36.bpo-39073.6Szd3i.rst M Lib/email/headerregistry.py M Lib/test/test_email/test_headerregistry.py diff --git a/Lib/email/headerregistry.py b/Lib/email/headerregistry.py index 0218cbfbd098d..fe30fc2c7e713 100644 --- a/Lib/email/headerregistry.py +++ b/Lib/email/headerregistry.py @@ -31,6 +31,11 @@ def __init__(self, display_name='', username='', domain='', addr_spec=None): without any Content Transfer Encoding. 
""" + + inputs = ''.join(filter(None, (display_name, username, domain, addr_spec))) + if '\r' in inputs or '\n' in inputs: + raise ValueError("invalid arguments; address parts cannot contain CR or LF") + # This clause with its potential 'raise' may only happen when an # application program creates an Address object using an addr_spec # keyword. The email library code itself must always supply username diff --git a/Lib/test/test_email/test_headerregistry.py b/Lib/test/test_email/test_headerregistry.py index b54df3ebc61a1..e808e502b21e6 100644 --- a/Lib/test/test_email/test_headerregistry.py +++ b/Lib/test/test_email/test_headerregistry.py @@ -1436,6 +1436,25 @@ def test_il8n(self): # with self.assertRaises(ValueError): # Address('foo', 'w?k', 'example.com') + def test_crlf_in_constructor_args_raises(self): + cases = ( + dict(display_name='foo\r'), + dict(display_name='foo\n'), + dict(display_name='foo\r\n'), + dict(domain='example.com\r'), + dict(domain='example.com\n'), + dict(domain='example.com\r\n'), + dict(username='wok\r'), + dict(username='wok\n'), + dict(username='wok\r\n'), + dict(addr_spec='wok at example.com\r'), + dict(addr_spec='wok at example.com\n'), + dict(addr_spec='wok at example.com\r\n') + ) + for kwargs in cases: + with self.subTest(kwargs=kwargs), self.assertRaisesRegex(ValueError, "invalid arguments"): + Address(**kwargs) + def test_non_ascii_username_in_addr_spec_raises(self): with self.assertRaises(ValueError): Address('foo', addr_spec='w?k at example.com') diff --git a/Misc/NEWS.d/next/Security/2020-03-15-01-28-36.bpo-39073.6Szd3i.rst b/Misc/NEWS.d/next/Security/2020-03-15-01-28-36.bpo-39073.6Szd3i.rst new file mode 100644 index 0000000000000..6c9447b897bf6 --- /dev/null +++ b/Misc/NEWS.d/next/Security/2020-03-15-01-28-36.bpo-39073.6Szd3i.rst @@ -0,0 +1 @@ +Disallow CR or LF in email.headerregistry.Address arguments to guard against header injection attacks. 
From webhook-mailer at python.org Wed May 27 11:22:12 2020 From: webhook-mailer at python.org (Sean Gillespie) Date: Wed, 27 May 2020 15:22:12 -0000 Subject: [Python-checkins] bpo-13097: ctypes: limit callback to 1024 arguments (GH-19914) Message-ID: https://github.com/python/cpython/commit/29a1384c040d39659e7d01f1fd7b6eb71ef2634e commit: 29a1384c040d39659e7d01f1fd7b6eb71ef2634e branch: master author: Sean Gillespie committer: GitHub date: 2020-05-27T17:22:07+02:00 summary: bpo-13097: ctypes: limit callback to 1024 arguments (GH-19914) ctypes now raises an ArgumentError when a callback is invoked with more than 1024 arguments. The ctypes module allocates arguments on the stack in ctypes_callproc() using alloca(), which is problematic when large numbers of arguments are passed. Instead of a stack overflow, this commit raises an ArgumentError if more than 1024 parameters are passed. files: A Misc/NEWS.d/next/Library/2020-05-06-02-01-25.bpo-13097.Wh5xSK.rst M Lib/ctypes/test/test_callbacks.py M Modules/_ctypes/callproc.c diff --git a/Lib/ctypes/test/test_callbacks.py b/Lib/ctypes/test/test_callbacks.py index f622093df61da..937a06d981b00 100644 --- a/Lib/ctypes/test/test_callbacks.py +++ b/Lib/ctypes/test/test_callbacks.py @@ -287,6 +287,21 @@ def callback(check, s): self.assertEqual(s.second, check.second) self.assertEqual(s.third, check.third) + def test_callback_too_many_args(self): + def func(*args): + return len(args) + + CTYPES_MAX_ARGCOUNT = 1024 + proto = CFUNCTYPE(c_int, *(c_int,) * CTYPES_MAX_ARGCOUNT) + cb = proto(func) + args1 = (1,) * CTYPES_MAX_ARGCOUNT + self.assertEqual(cb(*args1), CTYPES_MAX_ARGCOUNT) + + args2 = (1,) * (CTYPES_MAX_ARGCOUNT + 1) + with self.assertRaises(ArgumentError): + cb(*args2) + + ################################################################ if __name__ == '__main__': diff --git a/Misc/NEWS.d/next/Library/2020-05-06-02-01-25.bpo-13097.Wh5xSK.rst b/Misc/NEWS.d/next/Library/2020-05-06-02-01-25.bpo-13097.Wh5xSK.rst new file mode 
100644 index 0000000000000..a7f5f58828917 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-06-02-01-25.bpo-13097.Wh5xSK.rst @@ -0,0 +1 @@ +``ctypes`` now raises an ``ArgumentError`` when a callback is invoked with more than 1024 arguments. \ No newline at end of file diff --git a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c index 5c1ecabd8164d..9bc28c260717d 100644 --- a/Modules/_ctypes/callproc.c +++ b/Modules/_ctypes/callproc.c @@ -1072,6 +1072,14 @@ GetComError(HRESULT errcode, GUID *riid, IUnknown *pIunk) #define IS_PASS_BY_REF(x) (x > 8 || !POW2(x)) #endif +/* + * bpo-13097: Max number of arguments _ctypes_callproc will accept. + * + * This limit is enforced for the `alloca()` call in `_ctypes_callproc`, + * to avoid allocating a massive buffer on the stack. + */ +#define CTYPES_MAX_ARGCOUNT 1024 + /* * Requirements, must be ensured by the caller: * - argtuple is tuple of arguments @@ -1107,6 +1115,13 @@ PyObject *_ctypes_callproc(PPROC pProc, ++argcount; #endif + if (argcount > CTYPES_MAX_ARGCOUNT) + { + PyErr_Format(PyExc_ArgError, "too many arguments (%zi), maximum is %i", + argcount, CTYPES_MAX_ARGCOUNT); + return NULL; + } + args = (struct argument *)alloca(sizeof(struct argument) * argcount); if (!args) { PyErr_NoMemory(); From webhook-mailer at python.org Wed May 27 11:51:32 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Wed, 27 May 2020 15:51:32 -0000 Subject: [Python-checkins] bpo-13097: ctypes: limit callback to 1024 arguments (GH-19914) Message-ID: https://github.com/python/cpython/commit/1c4dcafd0b025e771f4dbd7197d0b5f263c9cb54 commit: 1c4dcafd0b025e771f4dbd7197d0b5f263c9cb54 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-27T08:51:23-07:00 summary: bpo-13097: ctypes: limit callback to 1024 arguments (GH-19914) ctypes now raises an ArgumentError when a callback is invoked with more than 1024 arguments. 
The ctypes module allocates arguments on the stack in ctypes_callproc() using alloca(), which is problematic when large numbers of arguments are passed. Instead of a stack overflow, this commit raises an ArgumentError if more than 1024 parameters are passed. (cherry picked from commit 29a1384c040d39659e7d01f1fd7b6eb71ef2634e) Co-authored-by: Sean Gillespie files: A Misc/NEWS.d/next/Library/2020-05-06-02-01-25.bpo-13097.Wh5xSK.rst M Lib/ctypes/test/test_callbacks.py M Modules/_ctypes/callproc.c diff --git a/Lib/ctypes/test/test_callbacks.py b/Lib/ctypes/test/test_callbacks.py index f622093df61da..937a06d981b00 100644 --- a/Lib/ctypes/test/test_callbacks.py +++ b/Lib/ctypes/test/test_callbacks.py @@ -287,6 +287,21 @@ def callback(check, s): self.assertEqual(s.second, check.second) self.assertEqual(s.third, check.third) + def test_callback_too_many_args(self): + def func(*args): + return len(args) + + CTYPES_MAX_ARGCOUNT = 1024 + proto = CFUNCTYPE(c_int, *(c_int,) * CTYPES_MAX_ARGCOUNT) + cb = proto(func) + args1 = (1,) * CTYPES_MAX_ARGCOUNT + self.assertEqual(cb(*args1), CTYPES_MAX_ARGCOUNT) + + args2 = (1,) * (CTYPES_MAX_ARGCOUNT + 1) + with self.assertRaises(ArgumentError): + cb(*args2) + + ################################################################ if __name__ == '__main__': diff --git a/Misc/NEWS.d/next/Library/2020-05-06-02-01-25.bpo-13097.Wh5xSK.rst b/Misc/NEWS.d/next/Library/2020-05-06-02-01-25.bpo-13097.Wh5xSK.rst new file mode 100644 index 0000000000000..a7f5f58828917 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-06-02-01-25.bpo-13097.Wh5xSK.rst @@ -0,0 +1 @@ +``ctypes`` now raises an ``ArgumentError`` when a callback is invoked with more than 1024 arguments. 
\ No newline at end of file diff --git a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c index 5861042398d40..73413531bdbf0 100644 --- a/Modules/_ctypes/callproc.c +++ b/Modules/_ctypes/callproc.c @@ -1060,6 +1060,14 @@ GetComError(HRESULT errcode, GUID *riid, IUnknown *pIunk) #define IS_PASS_BY_REF(x) (x > 8 || !POW2(x)) #endif +/* + * bpo-13097: Max number of arguments _ctypes_callproc will accept. + * + * This limit is enforced for the `alloca()` call in `_ctypes_callproc`, + * to avoid allocating a massive buffer on the stack. + */ +#define CTYPES_MAX_ARGCOUNT 1024 + /* * Requirements, must be ensured by the caller: * - argtuple is tuple of arguments @@ -1095,6 +1103,13 @@ PyObject *_ctypes_callproc(PPROC pProc, ++argcount; #endif + if (argcount > CTYPES_MAX_ARGCOUNT) + { + PyErr_Format(PyExc_ArgError, "too many arguments (%zi), maximum is %i", + argcount, CTYPES_MAX_ARGCOUNT); + return NULL; + } + args = (struct argument *)alloca(sizeof(struct argument) * argcount); if (!args) { PyErr_NoMemory(); From webhook-mailer at python.org Wed May 27 11:53:12 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Wed, 27 May 2020 15:53:12 -0000 Subject: [Python-checkins] bpo-13097: ctypes: limit callback to 1024 arguments (GH-19914) Message-ID: https://github.com/python/cpython/commit/a285af7e626d1b81cf09f8b2bf7656f100bc1237 commit: a285af7e626d1b81cf09f8b2bf7656f100bc1237 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-27T08:53:03-07:00 summary: bpo-13097: ctypes: limit callback to 1024 arguments (GH-19914) ctypes now raises an ArgumentError when a callback is invoked with more than 1024 arguments. The ctypes module allocates arguments on the stack in ctypes_callproc() using alloca(), which is problematic when large numbers of arguments are passed. Instead of a stack overflow, this commit raises an ArgumentError if more than 1024 parameters are passed. 
(cherry picked from commit 29a1384c040d39659e7d01f1fd7b6eb71ef2634e) Co-authored-by: Sean Gillespie files: A Misc/NEWS.d/next/Library/2020-05-06-02-01-25.bpo-13097.Wh5xSK.rst M Lib/ctypes/test/test_callbacks.py M Modules/_ctypes/callproc.c diff --git a/Lib/ctypes/test/test_callbacks.py b/Lib/ctypes/test/test_callbacks.py index f622093df61da..937a06d981b00 100644 --- a/Lib/ctypes/test/test_callbacks.py +++ b/Lib/ctypes/test/test_callbacks.py @@ -287,6 +287,21 @@ def callback(check, s): self.assertEqual(s.second, check.second) self.assertEqual(s.third, check.third) + def test_callback_too_many_args(self): + def func(*args): + return len(args) + + CTYPES_MAX_ARGCOUNT = 1024 + proto = CFUNCTYPE(c_int, *(c_int,) * CTYPES_MAX_ARGCOUNT) + cb = proto(func) + args1 = (1,) * CTYPES_MAX_ARGCOUNT + self.assertEqual(cb(*args1), CTYPES_MAX_ARGCOUNT) + + args2 = (1,) * (CTYPES_MAX_ARGCOUNT + 1) + with self.assertRaises(ArgumentError): + cb(*args2) + + ################################################################ if __name__ == '__main__': diff --git a/Misc/NEWS.d/next/Library/2020-05-06-02-01-25.bpo-13097.Wh5xSK.rst b/Misc/NEWS.d/next/Library/2020-05-06-02-01-25.bpo-13097.Wh5xSK.rst new file mode 100644 index 0000000000000..a7f5f58828917 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-06-02-01-25.bpo-13097.Wh5xSK.rst @@ -0,0 +1 @@ +``ctypes`` now raises an ``ArgumentError`` when a callback is invoked with more than 1024 arguments. \ No newline at end of file diff --git a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c index 4027bdb622750..e0a110d3f4604 100644 --- a/Modules/_ctypes/callproc.c +++ b/Modules/_ctypes/callproc.c @@ -1073,6 +1073,14 @@ GetComError(HRESULT errcode, GUID *riid, IUnknown *pIunk) #define IS_PASS_BY_REF(x) (x > 8 || !POW2(x)) #endif +/* + * bpo-13097: Max number of arguments _ctypes_callproc will accept. + * + * This limit is enforced for the `alloca()` call in `_ctypes_callproc`, + * to avoid allocating a massive buffer on the stack. 
+ */ +#define CTYPES_MAX_ARGCOUNT 1024 + /* * Requirements, must be ensured by the caller: * - argtuple is tuple of arguments @@ -1108,6 +1116,13 @@ PyObject *_ctypes_callproc(PPROC pProc, ++argcount; #endif + if (argcount > CTYPES_MAX_ARGCOUNT) + { + PyErr_Format(PyExc_ArgError, "too many arguments (%zi), maximum is %i", + argcount, CTYPES_MAX_ARGCOUNT); + return NULL; + } + args = (struct argument *)alloca(sizeof(struct argument) * argcount); if (!args) { PyErr_NoMemory(); From webhook-mailer at python.org Wed May 27 12:04:20 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Wed, 27 May 2020 16:04:20 -0000 Subject: [Python-checkins] Refactor error handling code in Parser/pegen/pegen.c (GH-20440) Message-ID: https://github.com/python/cpython/commit/526e23f1538134b728c21ac71ac977ae9e6a8de6 commit: 526e23f1538134b728c21ac71ac977ae9e6a8de6 branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-05-27T09:04:11-07:00 summary: Refactor error handling code in Parser/pegen/pegen.c (GH-20440) Set p->error_indicator in various places, where it's needed, but it's not done. 
Automerge-Triggered-By: @gvanrossum files: M Parser/pegen/pegen.c diff --git a/Parser/pegen/pegen.c b/Parser/pegen/pegen.c index a0285bcb60e95..c55ff7e45c0da 100644 --- a/Parser/pegen/pegen.c +++ b/Parser/pegen/pegen.c @@ -775,15 +775,15 @@ _PyPegen_expect_soft_keyword(Parser *p, const char *keyword) if (t->type != NAME) { return NULL; } - char* s = PyBytes_AsString(t->bytes); + char *s = PyBytes_AsString(t->bytes); if (!s) { + p->error_indicator = 1; return NULL; } if (strcmp(s, keyword) != 0) { return NULL; } - expr_ty res = _PyPegen_name_token(p); - return res; + return _PyPegen_name_token(p); } Token * @@ -809,10 +809,12 @@ _PyPegen_name_token(Parser *p) } char* s = PyBytes_AsString(t->bytes); if (!s) { + p->error_indicator = 1; return NULL; } PyObject *id = _PyPegen_new_identifier(p, s); if (id == NULL) { + p->error_indicator = 1; return NULL; } return Name(id, Load, t->lineno, t->col_offset, t->end_lineno, t->end_col_offset, @@ -905,6 +907,7 @@ _PyPegen_number_token(Parser *p) char *num_raw = PyBytes_AsString(t->bytes); if (num_raw == NULL) { + p->error_indicator = 1; return NULL; } @@ -917,11 +920,13 @@ _PyPegen_number_token(Parser *p) PyObject *c = parsenumber(num_raw); if (c == NULL) { + p->error_indicator = 1; return NULL; } if (PyArena_AddPyObject(p->arena, c) < 0) { Py_DECREF(c); + p->error_indicator = 1; return NULL; } From webhook-mailer at python.org Wed May 27 15:47:38 2020 From: webhook-mailer at python.org (Fantix King) Date: Wed, 27 May 2020 19:47:38 -0000 Subject: [Python-checkins] bpo-30064: Fix asyncio loop.sock_* race condition issue (#20369) Message-ID: https://github.com/python/cpython/commit/210a137396979d747c2602eeef46c34fc4955448 commit: 210a137396979d747c2602eeef46c34fc4955448 branch: master author: Fantix King committer: GitHub date: 2020-05-27T12:47:30-07:00 summary: bpo-30064: Fix asyncio loop.sock_* race condition issue (#20369) files: A Misc/NEWS.d/next/Library/2020-05-25-11-52-23.bpo-30064.6CICsH.rst M 
Lib/asyncio/selector_events.py M Lib/test/test_asyncio/test_sock_lowlevel.py diff --git a/Lib/asyncio/selector_events.py b/Lib/asyncio/selector_events.py index a05cbb6bdd69d..884a58f2ed650 100644 --- a/Lib/asyncio/selector_events.py +++ b/Lib/asyncio/selector_events.py @@ -266,6 +266,7 @@ def _add_reader(self, fd, callback, *args): (handle, writer)) if reader is not None: reader.cancel() + return handle def _remove_reader(self, fd): if self.is_closed(): @@ -302,6 +303,7 @@ def _add_writer(self, fd, callback, *args): (reader, handle)) if writer is not None: writer.cancel() + return handle def _remove_writer(self, fd): """Remove a writer callback.""" @@ -329,7 +331,7 @@ def _remove_writer(self, fd): def add_reader(self, fd, callback, *args): """Add a reader callback.""" self._ensure_fd_no_transport(fd) - return self._add_reader(fd, callback, *args) + self._add_reader(fd, callback, *args) def remove_reader(self, fd): """Remove a reader callback.""" @@ -339,7 +341,7 @@ def remove_reader(self, fd): def add_writer(self, fd, callback, *args): """Add a writer callback..""" self._ensure_fd_no_transport(fd) - return self._add_writer(fd, callback, *args) + self._add_writer(fd, callback, *args) def remove_writer(self, fd): """Remove a writer callback.""" @@ -362,13 +364,15 @@ async def sock_recv(self, sock, n): pass fut = self.create_future() fd = sock.fileno() - self.add_reader(fd, self._sock_recv, fut, sock, n) + self._ensure_fd_no_transport(fd) + handle = self._add_reader(fd, self._sock_recv, fut, sock, n) fut.add_done_callback( - functools.partial(self._sock_read_done, fd)) + functools.partial(self._sock_read_done, fd, handle=handle)) return await fut - def _sock_read_done(self, fd, fut): - self.remove_reader(fd) + def _sock_read_done(self, fd, fut, handle=None): + if handle is None or not handle.cancelled(): + self.remove_reader(fd) def _sock_recv(self, fut, sock, n): # _sock_recv() can add itself as an I/O callback if the operation can't @@ -401,9 +405,10 @@ async def 
sock_recv_into(self, sock, buf): pass fut = self.create_future() fd = sock.fileno() - self.add_reader(fd, self._sock_recv_into, fut, sock, buf) + self._ensure_fd_no_transport(fd) + handle = self._add_reader(fd, self._sock_recv_into, fut, sock, buf) fut.add_done_callback( - functools.partial(self._sock_read_done, fd)) + functools.partial(self._sock_read_done, fd, handle=handle)) return await fut def _sock_recv_into(self, fut, sock, buf): @@ -446,11 +451,12 @@ async def sock_sendall(self, sock, data): fut = self.create_future() fd = sock.fileno() - fut.add_done_callback( - functools.partial(self._sock_write_done, fd)) + self._ensure_fd_no_transport(fd) # use a trick with a list in closure to store a mutable state - self.add_writer(fd, self._sock_sendall, fut, sock, - memoryview(data), [n]) + handle = self._add_writer(fd, self._sock_sendall, fut, sock, + memoryview(data), [n]) + fut.add_done_callback( + functools.partial(self._sock_write_done, fd, handle=handle)) return await fut def _sock_sendall(self, fut, sock, view, pos): @@ -502,9 +508,11 @@ def _sock_connect(self, fut, sock, address): # connection runs in background. We have to wait until the socket # becomes writable to be notified when the connection succeed or # fails. 
+ self._ensure_fd_no_transport(fd) + handle = self._add_writer( + fd, self._sock_connect_cb, fut, sock, address) fut.add_done_callback( - functools.partial(self._sock_write_done, fd)) - self.add_writer(fd, self._sock_connect_cb, fut, sock, address) + functools.partial(self._sock_write_done, fd, handle=handle)) except (SystemExit, KeyboardInterrupt): raise except BaseException as exc: @@ -512,8 +520,9 @@ def _sock_connect(self, fut, sock, address): else: fut.set_result(None) - def _sock_write_done(self, fd, fut): - self.remove_writer(fd) + def _sock_write_done(self, fd, fut, handle=None): + if handle is None or not handle.cancelled(): + self.remove_writer(fd) def _sock_connect_cb(self, fut, sock, address): if fut.done(): diff --git a/Lib/test/test_asyncio/test_sock_lowlevel.py b/Lib/test/test_asyncio/test_sock_lowlevel.py index 2f2d5a454973b..5e6a90abb46cf 100644 --- a/Lib/test/test_asyncio/test_sock_lowlevel.py +++ b/Lib/test/test_asyncio/test_sock_lowlevel.py @@ -1,4 +1,5 @@ import socket +import time import asyncio import sys from asyncio import proactor_events @@ -122,6 +123,136 @@ def test_sock_client_ops(self): sock = socket.socket() self._basetest_sock_recv_into(httpd, sock) + async def _basetest_sock_recv_racing(self, httpd, sock): + sock.setblocking(False) + await self.loop.sock_connect(sock, httpd.address) + + task = asyncio.create_task(self.loop.sock_recv(sock, 1024)) + await asyncio.sleep(0) + task.cancel() + + asyncio.create_task( + self.loop.sock_sendall(sock, b'GET / HTTP/1.0\r\n\r\n')) + data = await self.loop.sock_recv(sock, 1024) + # consume data + await self.loop.sock_recv(sock, 1024) + + self.assertTrue(data.startswith(b'HTTP/1.0 200 OK')) + + async def _basetest_sock_recv_into_racing(self, httpd, sock): + sock.setblocking(False) + await self.loop.sock_connect(sock, httpd.address) + + data = bytearray(1024) + with memoryview(data) as buf: + task = asyncio.create_task( + self.loop.sock_recv_into(sock, buf[:1024])) + await asyncio.sleep(0) + 
task.cancel() + + task = asyncio.create_task( + self.loop.sock_sendall(sock, b'GET / HTTP/1.0\r\n\r\n')) + nbytes = await self.loop.sock_recv_into(sock, buf[:1024]) + # consume data + await self.loop.sock_recv_into(sock, buf[nbytes:]) + self.assertTrue(data.startswith(b'HTTP/1.0 200 OK')) + + await task + + async def _basetest_sock_send_racing(self, listener, sock): + listener.bind(('127.0.0.1', 0)) + listener.listen(1) + + # make connection + sock.setblocking(False) + task = asyncio.create_task( + self.loop.sock_connect(sock, listener.getsockname())) + await asyncio.sleep(0) + server = listener.accept()[0] + server.setblocking(False) + + with server: + await task + + # fill the buffer + with self.assertRaises(BlockingIOError): + while True: + sock.send(b' ' * 5) + + # cancel a blocked sock_sendall + task = asyncio.create_task( + self.loop.sock_sendall(sock, b'hello')) + await asyncio.sleep(0) + task.cancel() + + # clear the buffer + async def recv_until(): + data = b'' + while not data: + data = await self.loop.sock_recv(server, 1024) + data = data.strip() + return data + task = asyncio.create_task(recv_until()) + + # immediately register another sock_sendall + await self.loop.sock_sendall(sock, b'world') + data = await task + # ProactorEventLoop could deliver hello + self.assertTrue(data.endswith(b'world')) + + async def _basetest_sock_connect_racing(self, listener, sock): + listener.bind(('127.0.0.1', 0)) + addr = listener.getsockname() + sock.setblocking(False) + + task = asyncio.create_task(self.loop.sock_connect(sock, addr)) + await asyncio.sleep(0) + task.cancel() + + listener.listen(1) + i = 0 + while True: + try: + await self.loop.sock_connect(sock, addr) + break + except ConnectionRefusedError: # on Linux we need another retry + await self.loop.sock_connect(sock, addr) + break + except OSError as e: # on Windows we need more retries + # A connect request was made on an already connected socket + if getattr(e, 'winerror', 0) == 10056: + break + + # 
https://stackoverflow.com/a/54437602/3316267 + if getattr(e, 'winerror', 0) != 10022: + raise + i += 1 + if i >= 128: + raise # too many retries + # avoid touching event loop to maintain race condition + time.sleep(0.01) + + def test_sock_client_racing(self): + with test_utils.run_test_server() as httpd: + sock = socket.socket() + with sock: + self.loop.run_until_complete(asyncio.wait_for( + self._basetest_sock_recv_racing(httpd, sock), 10)) + sock = socket.socket() + with sock: + self.loop.run_until_complete(asyncio.wait_for( + self._basetest_sock_recv_into_racing(httpd, sock), 10)) + listener = socket.socket() + sock = socket.socket() + with listener, sock: + self.loop.run_until_complete(asyncio.wait_for( + self._basetest_sock_send_racing(listener, sock), 10)) + listener = socket.socket() + sock = socket.socket() + with listener, sock: + self.loop.run_until_complete(asyncio.wait_for( + self._basetest_sock_connect_racing(listener, sock), 10)) + async def _basetest_huge_content(self, address): sock = socket.socket() sock.setblocking(False) diff --git a/Misc/NEWS.d/next/Library/2020-05-25-11-52-23.bpo-30064.6CICsH.rst b/Misc/NEWS.d/next/Library/2020-05-25-11-52-23.bpo-30064.6CICsH.rst new file mode 100644 index 0000000000000..904991dca16d8 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-25-11-52-23.bpo-30064.6CICsH.rst @@ -0,0 +1 @@ +Fix asyncio ``loop.sock_*`` race condition issue From webhook-mailer at python.org Wed May 27 15:50:12 2020 From: webhook-mailer at python.org (Christian Heimes) Date: Wed, 27 May 2020 19:50:12 -0000 Subject: [Python-checkins] bpo-40791: Use CRYPTO_memcmp() for compare_digest (#20456) Message-ID: https://github.com/python/cpython/commit/db5aed931f8a617f7b63e773f62db468fe9c5ca1 commit: db5aed931f8a617f7b63e773f62db468fe9c5ca1 branch: master author: Christian Heimes committer: GitHub date: 2020-05-27T21:50:06+02:00 summary: bpo-40791: Use CRYPTO_memcmp() for compare_digest (#20456) hashlib.compare_digest uses OpenSSL's 
CRYPTO_memcmp() function when OpenSSL is available. Note: The _operator module is a builtin module. I don't want to add libcrypto dependency to libpython. Therefore I duplicated the wrapper function and added a copy to _hashopenssl.c. files: A Misc/NEWS.d/next/Library/2020-05-27-18-04-52.bpo-40791.IzpNor.rst M Doc/library/hmac.rst M Lib/hmac.py M Lib/test/test_hmac.py M Modules/_hashopenssl.c M Modules/_operator.c M Modules/clinic/_hashopenssl.c.h diff --git a/Doc/library/hmac.rst b/Doc/library/hmac.rst index 5ad348490eaf6..6f1b59b57ce58 100644 --- a/Doc/library/hmac.rst +++ b/Doc/library/hmac.rst @@ -138,6 +138,11 @@ This module also provides the following helper function: .. versionadded:: 3.3 + .. versionchanged:: 3.10 + + The function uses OpenSSL's ``CRYPTO_memcmp()`` internally when + available. + .. seealso:: diff --git a/Lib/hmac.py b/Lib/hmac.py index 54a1ef9bdbdcf..180bc378b52d6 100644 --- a/Lib/hmac.py +++ b/Lib/hmac.py @@ -4,14 +4,15 @@ """ import warnings as _warnings -from _operator import _compare_digest as compare_digest try: import _hashlib as _hashopenssl except ImportError: _hashopenssl = None _openssl_md_meths = None + from _operator import _compare_digest as compare_digest else: _openssl_md_meths = frozenset(_hashopenssl.openssl_md_meth_names) + compare_digest = _hashopenssl.compare_digest import hashlib as _hashlib trans_5C = bytes((x ^ 0x5C) for x in range(256)) diff --git a/Lib/test/test_hmac.py b/Lib/test/test_hmac.py index 7a52e39c5d471..6daf22ca06fb8 100644 --- a/Lib/test/test_hmac.py +++ b/Lib/test/test_hmac.py @@ -8,12 +8,16 @@ from test.support import hashlib_helper +from _operator import _compare_digest as operator_compare_digest + try: from _hashlib import HMAC as C_HMAC from _hashlib import hmac_new as c_hmac_new + from _hashlib import compare_digest as openssl_compare_digest except ImportError: C_HMAC = None c_hmac_new = None + openssl_compare_digest = None def ignore_warning(func): @@ -505,87 +509,101 @@ def 
test_equality_new(self): class CompareDigestTestCase(unittest.TestCase): - def test_compare_digest(self): + def test_hmac_compare_digest(self): + self._test_compare_digest(hmac.compare_digest) + if openssl_compare_digest is not None: + self.assertIs(hmac.compare_digest, openssl_compare_digest) + else: + self.assertIs(hmac.compare_digest, operator_compare_digest) + + def test_operator_compare_digest(self): + self._test_compare_digest(operator_compare_digest) + + @unittest.skipIf(openssl_compare_digest is None, "test requires _hashlib") + def test_openssl_compare_digest(self): + self._test_compare_digest(openssl_compare_digest) + + def _test_compare_digest(self, compare_digest): # Testing input type exception handling a, b = 100, 200 - self.assertRaises(TypeError, hmac.compare_digest, a, b) + self.assertRaises(TypeError, compare_digest, a, b) a, b = 100, b"foobar" - self.assertRaises(TypeError, hmac.compare_digest, a, b) + self.assertRaises(TypeError, compare_digest, a, b) a, b = b"foobar", 200 - self.assertRaises(TypeError, hmac.compare_digest, a, b) + self.assertRaises(TypeError, compare_digest, a, b) a, b = "foobar", b"foobar" - self.assertRaises(TypeError, hmac.compare_digest, a, b) + self.assertRaises(TypeError, compare_digest, a, b) a, b = b"foobar", "foobar" - self.assertRaises(TypeError, hmac.compare_digest, a, b) + self.assertRaises(TypeError, compare_digest, a, b) # Testing bytes of different lengths a, b = b"foobar", b"foo" - self.assertFalse(hmac.compare_digest(a, b)) + self.assertFalse(compare_digest(a, b)) a, b = b"\xde\xad\xbe\xef", b"\xde\xad" - self.assertFalse(hmac.compare_digest(a, b)) + self.assertFalse(compare_digest(a, b)) # Testing bytes of same lengths, different values a, b = b"foobar", b"foobaz" - self.assertFalse(hmac.compare_digest(a, b)) + self.assertFalse(compare_digest(a, b)) a, b = b"\xde\xad\xbe\xef", b"\xab\xad\x1d\xea" - self.assertFalse(hmac.compare_digest(a, b)) + self.assertFalse(compare_digest(a, b)) # Testing bytes of same 
lengths, same values a, b = b"foobar", b"foobar" - self.assertTrue(hmac.compare_digest(a, b)) + self.assertTrue(compare_digest(a, b)) a, b = b"\xde\xad\xbe\xef", b"\xde\xad\xbe\xef" - self.assertTrue(hmac.compare_digest(a, b)) + self.assertTrue(compare_digest(a, b)) # Testing bytearrays of same lengths, same values a, b = bytearray(b"foobar"), bytearray(b"foobar") - self.assertTrue(hmac.compare_digest(a, b)) + self.assertTrue(compare_digest(a, b)) # Testing bytearrays of different lengths a, b = bytearray(b"foobar"), bytearray(b"foo") - self.assertFalse(hmac.compare_digest(a, b)) + self.assertFalse(compare_digest(a, b)) # Testing bytearrays of same lengths, different values a, b = bytearray(b"foobar"), bytearray(b"foobaz") - self.assertFalse(hmac.compare_digest(a, b)) + self.assertFalse(compare_digest(a, b)) # Testing byte and bytearray of same lengths, same values a, b = bytearray(b"foobar"), b"foobar" - self.assertTrue(hmac.compare_digest(a, b)) - self.assertTrue(hmac.compare_digest(b, a)) + self.assertTrue(compare_digest(a, b)) + self.assertTrue(compare_digest(b, a)) # Testing byte bytearray of different lengths a, b = bytearray(b"foobar"), b"foo" - self.assertFalse(hmac.compare_digest(a, b)) - self.assertFalse(hmac.compare_digest(b, a)) + self.assertFalse(compare_digest(a, b)) + self.assertFalse(compare_digest(b, a)) # Testing byte and bytearray of same lengths, different values a, b = bytearray(b"foobar"), b"foobaz" - self.assertFalse(hmac.compare_digest(a, b)) - self.assertFalse(hmac.compare_digest(b, a)) + self.assertFalse(compare_digest(a, b)) + self.assertFalse(compare_digest(b, a)) # Testing str of same lengths a, b = "foobar", "foobar" - self.assertTrue(hmac.compare_digest(a, b)) + self.assertTrue(compare_digest(a, b)) # Testing str of different lengths a, b = "foo", "foobar" - self.assertFalse(hmac.compare_digest(a, b)) + self.assertFalse(compare_digest(a, b)) # Testing bytes of same lengths, different values a, b = "foobar", "foobaz" - 
self.assertFalse(hmac.compare_digest(a, b)) + self.assertFalse(compare_digest(a, b)) # Testing error cases a, b = "foobar", b"foobar" - self.assertRaises(TypeError, hmac.compare_digest, a, b) + self.assertRaises(TypeError, compare_digest, a, b) a, b = b"foobar", "foobar" - self.assertRaises(TypeError, hmac.compare_digest, a, b) + self.assertRaises(TypeError, compare_digest, a, b) a, b = b"foobar", 1 - self.assertRaises(TypeError, hmac.compare_digest, a, b) + self.assertRaises(TypeError, compare_digest, a, b) a, b = 100, 200 - self.assertRaises(TypeError, hmac.compare_digest, a, b) + self.assertRaises(TypeError, compare_digest, a, b) a, b = "foo?", "foo?" - self.assertRaises(TypeError, hmac.compare_digest, a, b) + self.assertRaises(TypeError, compare_digest, a, b) # subclasses are supported by ignore __eq__ class mystr(str): @@ -593,22 +611,22 @@ def __eq__(self, other): return False a, b = mystr("foobar"), mystr("foobar") - self.assertTrue(hmac.compare_digest(a, b)) + self.assertTrue(compare_digest(a, b)) a, b = mystr("foobar"), "foobar" - self.assertTrue(hmac.compare_digest(a, b)) + self.assertTrue(compare_digest(a, b)) a, b = mystr("foobar"), mystr("foobaz") - self.assertFalse(hmac.compare_digest(a, b)) + self.assertFalse(compare_digest(a, b)) class mybytes(bytes): def __eq__(self, other): return False a, b = mybytes(b"foobar"), mybytes(b"foobar") - self.assertTrue(hmac.compare_digest(a, b)) + self.assertTrue(compare_digest(a, b)) a, b = mybytes(b"foobar"), b"foobar" - self.assertTrue(hmac.compare_digest(a, b)) + self.assertTrue(compare_digest(a, b)) a, b = mybytes(b"foobar"), mybytes(b"foobaz") - self.assertFalse(hmac.compare_digest(a, b)) + self.assertFalse(compare_digest(a, b)) if __name__ == "__main__": diff --git a/Misc/NEWS.d/next/Library/2020-05-27-18-04-52.bpo-40791.IzpNor.rst b/Misc/NEWS.d/next/Library/2020-05-27-18-04-52.bpo-40791.IzpNor.rst new file mode 100644 index 0000000000000..b88f308ec3b52 --- /dev/null +++ 
b/Misc/NEWS.d/next/Library/2020-05-27-18-04-52.bpo-40791.IzpNor.rst @@ -0,0 +1,2 @@ +:func:`hashlib.compare_digest` uses OpenSSL's ``CRYPTO_memcmp()`` function +when OpenSSL is available. diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c index 0b2ef95a6f126..adc8653773250 100644 --- a/Modules/_hashopenssl.c +++ b/Modules/_hashopenssl.c @@ -21,6 +21,7 @@ /* EVP is the preferred interface to hashing in OpenSSL */ #include #include +#include /* We use the object interface to discover what hashes OpenSSL supports. */ #include #include "openssl/err.h" @@ -1833,6 +1834,120 @@ _hashlib_get_fips_mode_impl(PyObject *module) #endif // !LIBRESSL_VERSION_NUMBER +static int +_tscmp(const unsigned char *a, const unsigned char *b, + Py_ssize_t len_a, Py_ssize_t len_b) +{ + /* loop count depends on length of b. Might leak very little timing + * information if sizes are different. + */ + Py_ssize_t length = len_b; + const void *left = a; + const void *right = b; + int result = 0; + + if (len_a != length) { + left = b; + result = 1; + } + + result |= CRYPTO_memcmp(left, right, length); + + return (result == 0); +} + +/* NOTE: Keep in sync with _operator.c implementation. */ + +/*[clinic input] +_hashlib.compare_digest + + a: object + b: object + / + +Return 'a == b'. + +This function uses an approach designed to prevent +timing analysis, making it appropriate for cryptography. + +a and b must both be of the same type: either str (ASCII only), +or any bytes-like object. + +Note: If a and b are of different lengths, or if an error occurs, +a timing attack could theoretically reveal information about the +types and lengths of a and b--but not their values. 
+[clinic start generated code]*/ + +static PyObject * +_hashlib_compare_digest_impl(PyObject *module, PyObject *a, PyObject *b) +/*[clinic end generated code: output=6f1c13927480aed9 input=9c40c6e566ca12f5]*/ +{ + int rc; + + /* ASCII unicode string */ + if(PyUnicode_Check(a) && PyUnicode_Check(b)) { + if (PyUnicode_READY(a) == -1 || PyUnicode_READY(b) == -1) { + return NULL; + } + if (!PyUnicode_IS_ASCII(a) || !PyUnicode_IS_ASCII(b)) { + PyErr_SetString(PyExc_TypeError, + "comparing strings with non-ASCII characters is " + "not supported"); + return NULL; + } + + rc = _tscmp(PyUnicode_DATA(a), + PyUnicode_DATA(b), + PyUnicode_GET_LENGTH(a), + PyUnicode_GET_LENGTH(b)); + } + /* fallback to buffer interface for bytes, bytesarray and other */ + else { + Py_buffer view_a; + Py_buffer view_b; + + if (PyObject_CheckBuffer(a) == 0 && PyObject_CheckBuffer(b) == 0) { + PyErr_Format(PyExc_TypeError, + "unsupported operand types(s) or combination of types: " + "'%.100s' and '%.100s'", + Py_TYPE(a)->tp_name, Py_TYPE(b)->tp_name); + return NULL; + } + + if (PyObject_GetBuffer(a, &view_a, PyBUF_SIMPLE) == -1) { + return NULL; + } + if (view_a.ndim > 1) { + PyErr_SetString(PyExc_BufferError, + "Buffer must be single dimension"); + PyBuffer_Release(&view_a); + return NULL; + } + + if (PyObject_GetBuffer(b, &view_b, PyBUF_SIMPLE) == -1) { + PyBuffer_Release(&view_a); + return NULL; + } + if (view_b.ndim > 1) { + PyErr_SetString(PyExc_BufferError, + "Buffer must be single dimension"); + PyBuffer_Release(&view_a); + PyBuffer_Release(&view_b); + return NULL; + } + + rc = _tscmp((const unsigned char*)view_a.buf, + (const unsigned char*)view_b.buf, + view_a.len, + view_b.len); + + PyBuffer_Release(&view_a); + PyBuffer_Release(&view_b); + } + + return PyBool_FromLong(rc); +} + /* List of functions exported by this module */ static struct PyMethodDef EVP_functions[] = { @@ -1840,6 +1955,7 @@ static struct PyMethodDef EVP_functions[] = { PBKDF2_HMAC_METHODDEF _HASHLIB_SCRYPT_METHODDEF 
_HASHLIB_GET_FIPS_MODE_METHODDEF + _HASHLIB_COMPARE_DIGEST_METHODDEF _HASHLIB_HMAC_SINGLESHOT_METHODDEF _HASHLIB_HMAC_NEW_METHODDEF _HASHLIB_OPENSSL_MD5_METHODDEF diff --git a/Modules/_operator.c b/Modules/_operator.c index 19026b6c38e60..8a54829e5bbcc 100644 --- a/Modules/_operator.c +++ b/Modules/_operator.c @@ -785,6 +785,8 @@ _operator_length_hint_impl(PyObject *module, PyObject *obj, return PyObject_LengthHint(obj, default_value); } +/* NOTE: Keep in sync with _hashopenssl.c implementation. */ + /*[clinic input] _operator._compare_digest = _operator.eq diff --git a/Modules/clinic/_hashopenssl.c.h b/Modules/clinic/_hashopenssl.c.h index 619cb1c8516b8..51ae2402896c1 100644 --- a/Modules/clinic/_hashopenssl.c.h +++ b/Modules/clinic/_hashopenssl.c.h @@ -1338,6 +1338,46 @@ _hashlib_get_fips_mode(PyObject *module, PyObject *Py_UNUSED(ignored)) #endif /* !defined(LIBRESSL_VERSION_NUMBER) */ +PyDoc_STRVAR(_hashlib_compare_digest__doc__, +"compare_digest($module, a, b, /)\n" +"--\n" +"\n" +"Return \'a == b\'.\n" +"\n" +"This function uses an approach designed to prevent\n" +"timing analysis, making it appropriate for cryptography.\n" +"\n" +"a and b must both be of the same type: either str (ASCII only),\n" +"or any bytes-like object.\n" +"\n" +"Note: If a and b are of different lengths, or if an error occurs,\n" +"a timing attack could theoretically reveal information about the\n" +"types and lengths of a and b--but not their values."); + +#define _HASHLIB_COMPARE_DIGEST_METHODDEF \ + {"compare_digest", (PyCFunction)(void(*)(void))_hashlib_compare_digest, METH_FASTCALL, _hashlib_compare_digest__doc__}, + +static PyObject * +_hashlib_compare_digest_impl(PyObject *module, PyObject *a, PyObject *b); + +static PyObject * +_hashlib_compare_digest(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + PyObject *a; + PyObject *b; + + if (!_PyArg_CheckPositional("compare_digest", nargs, 2, 2)) { + goto exit; + } + a = args[0]; + b = 
args[1]; + return_value = _hashlib_compare_digest_impl(module, a, b); + +exit: + return return_value; +} + #ifndef EVPXOF_DIGEST_METHODDEF #define EVPXOF_DIGEST_METHODDEF #endif /* !defined(EVPXOF_DIGEST_METHODDEF) */ @@ -1377,4 +1417,4 @@ _hashlib_get_fips_mode(PyObject *module, PyObject *Py_UNUSED(ignored)) #ifndef _HASHLIB_GET_FIPS_MODE_METHODDEF #define _HASHLIB_GET_FIPS_MODE_METHODDEF #endif /* !defined(_HASHLIB_GET_FIPS_MODE_METHODDEF) */ -/*[clinic end generated code: output=d8dddcd85fb11dde input=a9049054013a1b77]*/ +/*[clinic end generated code: output=95447a60132f039e input=a9049054013a1b77]*/ From webhook-mailer at python.org Wed May 27 16:12:16 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Wed, 27 May 2020 20:12:16 -0000 Subject: [Python-checkins] bpo-40025: Require _generate_next_value_ to be defined before members(GH-19763) Message-ID: https://github.com/python/cpython/commit/b5ecbf02e4dbdea6d1c9a6d7189137f76e70c073 commit: b5ecbf02e4dbdea6d1c9a6d7189137f76e70c073 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-27T13:12:08-07:00 summary: bpo-40025: Require _generate_next_value_ to be defined before members(GH-19763) require `_generate_next_value_` to be defined before members files: A Misc/NEWS.d/next/Library/2020-03-21-05-26-38.bpo-40025.DTLtyq.rst M Doc/library/enum.rst M Lib/enum.py M Lib/test/test_enum.py M Misc/ACKS diff --git a/Doc/library/enum.rst b/Doc/library/enum.rst index 7538084767d76..2649b7d8a51fd 100644 --- a/Doc/library/enum.rst +++ b/Doc/library/enum.rst @@ -273,6 +273,10 @@ overridden:: the next :class:`int` in sequence with the last :class:`int` provided, but the way it does this is an implementation detail and may change. +.. note:: + + The :meth:`_generate_next_value_` method must be defined before any members. 
+ Iteration --------- diff --git a/Lib/enum.py b/Lib/enum.py index 108d389d948de..14cc00e783915 100644 --- a/Lib/enum.py +++ b/Lib/enum.py @@ -60,6 +60,7 @@ def __init__(self): self._member_names = [] self._last_values = [] self._ignore = [] + self._auto_called = False def __setitem__(self, key, value): """Changes anything not dundered or not a descriptor. @@ -77,6 +78,9 @@ def __setitem__(self, key, value): ): raise ValueError('_names_ are reserved for future Enum use') if key == '_generate_next_value_': + # check if members already defined as auto() + if self._auto_called: + raise TypeError("_generate_next_value_ must be defined before members") setattr(self, '_generate_next_value', value) elif key == '_ignore_': if isinstance(value, str): @@ -100,6 +104,7 @@ def __setitem__(self, key, value): # enum overwriting a descriptor? raise TypeError('%r already defined as: %r' % (key, self[key])) if isinstance(value, auto): + self._auto_called = True if value.value == _auto_null: value.value = self._generate_next_value(key, 1, len(self._member_names), self._last_values[:]) value = value.value diff --git a/Lib/test/test_enum.py b/Lib/test/test_enum.py index 47081cf75ca08..a2a3c56734901 100644 --- a/Lib/test/test_enum.py +++ b/Lib/test/test_enum.py @@ -1702,6 +1702,16 @@ class Color(Enum): self.assertEqual(Color.blue.value, 2) self.assertEqual(Color.green.value, 3) + def test_auto_order(self): + with self.assertRaises(TypeError): + class Color(Enum): + red = auto() + green = auto() + blue = auto() + def _generate_next_value_(name, start, count, last): + return name + + def test_duplicate_auto(self): class Dupes(Enum): first = primero = auto() diff --git a/Misc/ACKS b/Misc/ACKS index 34a6fc439e89c..e85e370c98cc7 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -1217,6 +1217,7 @@ Adam Olsen Bryan Olson Grant Olson Koray Oner +Ethan Onstott Piet van Oostrum Tomas Oppelstrup Jason Orendorff diff --git a/Misc/NEWS.d/next/Library/2020-03-21-05-26-38.bpo-40025.DTLtyq.rst 
b/Misc/NEWS.d/next/Library/2020-03-21-05-26-38.bpo-40025.DTLtyq.rst new file mode 100644 index 0000000000000..7b699de4e0726 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-03-21-05-26-38.bpo-40025.DTLtyq.rst @@ -0,0 +1 @@ +Raise TypeError when _generate_next_value_ is defined after members. Patch by Ethan Onstott. \ No newline at end of file From webhook-mailer at python.org Wed May 27 16:30:46 2020 From: webhook-mailer at python.org (Shantanu) Date: Wed, 27 May 2020 20:30:46 -0000 Subject: [Python-checkins] bpo-40614: Respect feature version for f-string debug expressions (GH-20196) Message-ID: https://github.com/python/cpython/commit/c116c94ff119485761460f1033cdee425bed0310 commit: c116c94ff119485761460f1033cdee425bed0310 branch: master author: Shantanu committer: GitHub date: 2020-05-27T21:30:38+01:00 summary: bpo-40614: Respect feature version for f-string debug expressions (GH-20196) Co-authored-by: Lysandros Nikolaou Co-authored-by: Pablo Galindo files: A Misc/NEWS.d/next/Library/2020-05-18-22-41-02.bpo-40614.8j3kmq.rst M Lib/test/test_ast.py M Parser/pegen/parse_string.c M Python/ast.c diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py index 3e9c8b55cdff4..a3b366ec35da1 100644 --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast.py @@ -663,6 +663,11 @@ def test_ast_asdl_signature(self): expressions[0] = f"expr = {ast.expr.__subclasses__()[0].__doc__}" self.assertCountEqual(ast.expr.__doc__.split("\n"), expressions) + def test_issue40614_feature_version(self): + ast.parse('f"{x=}"', feature_version=(3, 8)) + with self.assertRaises(SyntaxError): + ast.parse('f"{x=}"', feature_version=(3, 7)) + class ASTHelpers_Test(unittest.TestCase): maxDiff = None diff --git a/Misc/NEWS.d/next/Library/2020-05-18-22-41-02.bpo-40614.8j3kmq.rst b/Misc/NEWS.d/next/Library/2020-05-18-22-41-02.bpo-40614.8j3kmq.rst new file mode 100644 index 0000000000000..238b98c14a326 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-18-22-41-02.bpo-40614.8j3kmq.rst @@ -0,0 +1 @@ 
+:func:`ast.parse` will not parse self documenting expressions in f-strings when passed ``feature_version`` is less than ``(3, 8)``. diff --git a/Parser/pegen/parse_string.c b/Parser/pegen/parse_string.c index a0ec698fa56a2..e24ecc58d3aa1 100644 --- a/Parser/pegen/parse_string.c +++ b/Parser/pegen/parse_string.c @@ -928,6 +928,11 @@ fstring_find_expr(Parser *p, const char **str, const char *end, int raw, int rec /* Check for =, which puts the text value of the expression in expr_text. */ if (**str == '=') { + if (p->feature_version < 8) { + RAISE_SYNTAX_ERROR("f-string: self documenting expressions are " + "only supported in Python 3.8 and greater"); + goto error; + } *str += 1; /* Skip over ASCII whitespace. No need to test for end of string diff --git a/Python/ast.c b/Python/ast.c index 2d20ca62aa837..c524b8e34e873 100644 --- a/Python/ast.c +++ b/Python/ast.c @@ -5069,6 +5069,12 @@ fstring_find_expr(const char **str, const char *end, int raw, int recurse_lvl, /* Check for =, which puts the text value of the expression in expr_text. */ if (**str == '=') { + if (c->c_feature_version < 8) { + ast_error(c, n, + "f-string: self documenting expressions are " + "only supported in Python 3.8 and greater"); + goto error; + } *str += 1; /* Skip over ASCII whitespace. 
No need to test for end of string From webhook-mailer at python.org Wed May 27 16:48:20 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Wed, 27 May 2020 20:48:20 -0000 Subject: [Python-checkins] Fix compiler warnings in _zoneinfo.c (GH-20342) Message-ID: https://github.com/python/cpython/commit/e4799b95945b44eb0e2eea26473db8e0a49ed0ee commit: e4799b95945b44eb0e2eea26473db8e0a49ed0ee branch: master author: Pablo Galindo committer: GitHub date: 2020-05-27T13:48:12-07:00 summary: Fix compiler warnings in _zoneinfo.c (GH-20342) ``` D:\a\cpython\cpython\Modules\_zoneinfo.c(903,52): warning C4267: '=': conversion from 'size_t' to 'unsigned int', possible loss of data [D:\a\cpython\cpython\PCbuild\_zoneinfo.vcxproj] D:\a\cpython\cpython\Modules\_zoneinfo.c(904,44): warning C4267: '=': conversion from 'size_t' to 'unsigned int', possible loss of data [D:\a\cpython\cpython\PCbuild\_zoneinfo.vcxproj] D:\a\cpython\cpython\Modules\_zoneinfo.c(1772,31): warning C4244: '=': conversion from 'ssize_t' to 'uint8_t', possible loss of data [D:\a\cpython\cpython\PCbuild\_zoneinfo.vcxproj] ``` files: M Modules/_zoneinfo.c diff --git a/Modules/_zoneinfo.c b/Modules/_zoneinfo.c index d852c763e2e3d..e8b28319993a1 100644 --- a/Modules/_zoneinfo.c +++ b/Modules/_zoneinfo.c @@ -36,8 +36,8 @@ typedef struct { PyObject *key; PyObject *file_repr; PyObject *weakreflist; - unsigned int num_transitions; - unsigned int num_ttinfos; + size_t num_transitions; + size_t num_ttinfos; int64_t *trans_list_utc; int64_t *trans_list_wall[2]; _ttinfo **trans_ttinfos; // References to the ttinfo for each transition @@ -117,14 +117,14 @@ ts_to_local(size_t *trans_idx, int64_t *trans_utc, long *utcoff, static int parse_tz_str(PyObject *tz_str_obj, _tzrule *out); -static ssize_t +static Py_ssize_t parse_abbr(const char *const p, PyObject **abbr); -static ssize_t +static Py_ssize_t parse_tz_delta(const char *const p, long *total_seconds); -static ssize_t +static Py_ssize_t parse_transition_time(const 
char *const p, int8_t *hour, int8_t *minute, int8_t *second); -static ssize_t +static Py_ssize_t parse_transition_rule(const char *const p, TransitionRuleType **out); static _ttinfo * @@ -891,12 +891,12 @@ load_data(PyZoneInfo_ZoneInfo *self, PyObject *file_obj) // Load the relevant sizes Py_ssize_t num_transitions = PyTuple_Size(trans_utc); - if (num_transitions == -1) { + if (num_transitions < 0) { goto error; } Py_ssize_t num_ttinfos = PyTuple_Size(utcoff_list); - if (num_ttinfos == -1) { + if (num_ttinfos < 0) { goto error; } @@ -908,7 +908,7 @@ load_data(PyZoneInfo_ZoneInfo *self, PyObject *file_obj) PyMem_Malloc(self->num_transitions * sizeof(int64_t)); trans_idx = PyMem_Malloc(self->num_transitions * sizeof(Py_ssize_t)); - for (Py_ssize_t i = 0; i < self->num_transitions; ++i) { + for (size_t i = 0; i < self->num_transitions; ++i) { PyObject *num = PyTuple_GetItem(trans_utc, i); if (num == NULL) { goto error; @@ -946,7 +946,7 @@ load_data(PyZoneInfo_ZoneInfo *self, PyObject *file_obj) if (utcoff == NULL || isdst == NULL) { goto error; } - for (Py_ssize_t i = 0; i < self->num_ttinfos; ++i) { + for (size_t i = 0; i < self->num_ttinfos; ++i) { PyObject *num = PyTuple_GetItem(utcoff_list, i); if (num == NULL) { goto error; @@ -1468,7 +1468,7 @@ parse_tz_str(PyObject *tz_str_obj, _tzrule *out) char *p = tz_str; // Read the `std` abbreviation, which must be at least 3 characters long. - ssize_t num_chars = parse_abbr(p, &std_abbr); + Py_ssize_t num_chars = parse_abbr(p, &std_abbr); if (num_chars < 1) { PyErr_Format(PyExc_ValueError, "Invalid STD format in %R", tz_str_obj); goto error; @@ -1565,18 +1565,19 @@ parse_tz_str(PyObject *tz_str_obj, _tzrule *out) return -1; } -static ssize_t -parse_uint(const char *const p) +static int +parse_uint(const char *const p, uint8_t *value) { if (!isdigit(*p)) { return -1; } - return (*p) - '0'; + *value = (*p) - '0'; + return 0; } /* Parse the STD and DST abbreviations from a TZ string. 
*/ -static ssize_t +static Py_ssize_t parse_abbr(const char *const p, PyObject **abbr) { const char *ptr = p; @@ -1629,7 +1630,7 @@ parse_abbr(const char *const p, PyObject **abbr) } /* Parse a UTC offset from a TZ str. */ -static ssize_t +static Py_ssize_t parse_tz_delta(const char *const p, long *total_seconds) { // From the POSIX spec: @@ -1712,7 +1713,7 @@ parse_tz_delta(const char *const p, long *total_seconds) } /* Parse the date portion of a transition rule. */ -static ssize_t +static Py_ssize_t parse_transition_rule(const char *const p, TransitionRuleType **out) { // The full transition rule indicates when to change back and forth between @@ -1739,20 +1740,18 @@ parse_transition_rule(const char *const p, TransitionRuleType **out) if (*ptr == 'M') { uint8_t month, week, day; ptr++; - ssize_t tmp = parse_uint(ptr); - if (tmp < 0) { + if (parse_uint(ptr, &month)) { return -1; } - month = (uint8_t)tmp; ptr++; if (*ptr != '.') { - tmp = parse_uint(ptr); - if (tmp < 0) { + uint8_t tmp; + if (parse_uint(ptr, &tmp)) { return -1; } month *= 10; - month += (uint8_t)tmp; + month += tmp; ptr++; } @@ -1763,18 +1762,15 @@ parse_transition_rule(const char *const p, TransitionRuleType **out) } ptr++; - tmp = parse_uint(ptr); - if (tmp < 0) { + if (parse_uint(ptr, values[i])) { return -1; } ptr++; - - *(values[i]) = tmp; } if (*ptr == '/') { ptr++; - ssize_t num_chars = + Py_ssize_t num_chars = parse_transition_time(ptr, &hour, &minute, &second); if (num_chars < 0) { return -1; @@ -1816,7 +1812,7 @@ parse_transition_rule(const char *const p, TransitionRuleType **out) if (*ptr == '/') { ptr++; - ssize_t num_chars = + Py_ssize_t num_chars = parse_transition_time(ptr, &hour, &minute, &second); if (num_chars < 0) { return -1; @@ -1840,7 +1836,7 @@ parse_transition_rule(const char *const p, TransitionRuleType **out) } /* Parse the time portion of a transition rule (e.g. 
following an /) */ -static ssize_t +static Py_ssize_t parse_transition_time(const char *const p, int8_t *hour, int8_t *minute, int8_t *second) { From webhook-mailer at python.org Wed May 27 17:32:27 2020 From: webhook-mailer at python.org (YoSTEALTH) Date: Wed, 27 May 2020 21:32:27 -0000 Subject: [Python-checkins] bpo-37129: Add os.RWF_APPEND flag for os.pwritev() (GH-20336) Message-ID: https://github.com/python/cpython/commit/76ef255bde772005bbd0761399b274c2240e61d3 commit: 76ef255bde772005bbd0761399b274c2240e61d3 branch: master author: YoSTEALTH <35307184+YoSTEALTH at users.noreply.github.com> committer: GitHub date: 2020-05-27T23:32:22+02:00 summary: bpo-37129: Add os.RWF_APPEND flag for os.pwritev() (GH-20336) files: A Misc/NEWS.d/next/Library/2020-05-23-04-18-00.bpo-37129.YoYoYo.rst M Doc/library/os.rst M Modules/clinic/posixmodule.c.h M Modules/posixmodule.c diff --git a/Doc/library/os.rst b/Doc/library/os.rst index 6d5fb314a8e39..275b2d390e7cf 100644 --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -1211,6 +1211,7 @@ or `the MSDN `_ on Windo - :data:`RWF_DSYNC` - :data:`RWF_SYNC` + - :data:`RWF_APPEND` Return the total number of bytes actually written. @@ -1228,8 +1229,8 @@ or `the MSDN `_ on Windo .. data:: RWF_DSYNC - Provide a per-write equivalent of the :data:`O_DSYNC` ``open(2)`` flag. This - flag effect applies only to the data range written by the system call. + Provide a per-write equivalent of the :data:`O_DSYNC` :func:`os.open` flag. + This flag effect applies only to the data range written by the system call. .. availability:: Linux 4.7 and newer. @@ -1238,14 +1239,28 @@ or `the MSDN `_ on Windo .. data:: RWF_SYNC - Provide a per-write equivalent of the :data:`O_SYNC` ``open(2)`` flag. This - flag effect applies only to the data range written by the system call. + Provide a per-write equivalent of the :data:`O_SYNC` :func:`os.open` flag. + This flag effect applies only to the data range written by the system call. .. 
availability:: Linux 4.7 and newer. .. versionadded:: 3.7 +.. data:: RWF_APPEND + + Provide a per-write equivalent of the :data:`O_APPEND` :func:`os.open` + flag. This flag is meaningful only for :func:`os.pwritev`, and its + effect applies only to the data range written by the system call. The + *offset* argument does not affect the write operation; the data is always + appended to the end of the file. However, if the *offset* argument is + ``-1``, the current file *offset* is updated. + + .. availability:: Linux 4.16 and newer. + + .. versionadded:: 3.10 + + .. function:: read(fd, n) Read at most *n* bytes from file descriptor *fd*. diff --git a/Misc/NEWS.d/next/Library/2020-05-23-04-18-00.bpo-37129.YoYoYo.rst b/Misc/NEWS.d/next/Library/2020-05-23-04-18-00.bpo-37129.YoYoYo.rst new file mode 100644 index 0000000000000..e025e96f4f1c0 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-23-04-18-00.bpo-37129.YoYoYo.rst @@ -0,0 +1 @@ +Add a new :data:`os.RWF_APPEND` flag for :func:`os.pwritev`. 
diff --git a/Modules/clinic/posixmodule.c.h b/Modules/clinic/posixmodule.c.h index 6f180abd25495..96ea02035ea41 100644 --- a/Modules/clinic/posixmodule.c.h +++ b/Modules/clinic/posixmodule.c.h @@ -5517,6 +5517,7 @@ PyDoc_STRVAR(os_pwritev__doc__, "\n" "- RWF_DSYNC\n" "- RWF_SYNC\n" +"- RWF_APPEND\n" "\n" "Using non-zero flags requires Linux 4.7 or newer."); @@ -8876,4 +8877,4 @@ os_waitstatus_to_exitcode(PyObject *module, PyObject *const *args, Py_ssize_t na #ifndef OS_WAITSTATUS_TO_EXITCODE_METHODDEF #define OS_WAITSTATUS_TO_EXITCODE_METHODDEF #endif /* !defined(OS_WAITSTATUS_TO_EXITCODE_METHODDEF) */ -/*[clinic end generated code: output=b97bbc8cb5078540 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=9623b9e6f3809842 input=a9049054013a1b77]*/ diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index 59ac47de1a709..47ae7a8c22ac2 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -9778,6 +9778,7 @@ The flags argument contains a bitwise OR of zero or more of the following flags: - RWF_DSYNC - RWF_SYNC +- RWF_APPEND Using non-zero flags requires Linux 4.7 or newer. [clinic start generated code]*/ @@ -9785,7 +9786,7 @@ Using non-zero flags requires Linux 4.7 or newer. 
static Py_ssize_t os_pwritev_impl(PyObject *module, int fd, PyObject *buffers, Py_off_t offset, int flags) -/*[clinic end generated code: output=e3dd3e9d11a6a5c7 input=803dc5ddbf0cfd3b]*/ +/*[clinic end generated code: output=e3dd3e9d11a6a5c7 input=35358c327e1a2a8e]*/ { Py_ssize_t cnt; Py_ssize_t result; @@ -14509,6 +14510,9 @@ all_ins(PyObject *m) #ifdef RWF_NOWAIT if (PyModule_AddIntConstant(m, "RWF_NOWAIT", RWF_NOWAIT)) return -1; #endif +#ifdef RWF_APPEND + if (PyModule_AddIntConstant(m, "RWF_APPEND", RWF_APPEND)) return -1; +#endif /* constants for posix_spawn */ #ifdef HAVE_POSIX_SPAWN From webhook-mailer at python.org Wed May 27 17:33:18 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Wed, 27 May 2020 21:33:18 -0000 Subject: [Python-checkins] Revert "bpo-32604: PEP 554 for use in test suite (GH-19985)" (#20465) Message-ID: https://github.com/python/cpython/commit/7d80b35af1ee03834ae4af83e920dee89c2bc273 commit: 7d80b35af1ee03834ae4af83e920dee89c2bc273 branch: master author: Pablo Galindo committer: GitHub date: 2020-05-27T23:33:13+02:00 summary: Revert "bpo-32604: PEP 554 for use in test suite (GH-19985)" (#20465) This reverts commit 9d17cbf33df7cfb67ca0f37f6463ba5c18676641. 
files: D Lib/test/support/interpreters.py D Lib/test/support/interpreters.rst D Lib/test/test_interpreters.py D Misc/NEWS.d/next/Library/2020-05-07-22-00-12.bpo-39881.E1xsNv.rst diff --git a/Lib/test/support/interpreters.py b/Lib/test/support/interpreters.py deleted file mode 100644 index ef9dcafb2a386..0000000000000 --- a/Lib/test/support/interpreters.py +++ /dev/null @@ -1,183 +0,0 @@ -"""Subinterpreters High Level Module.""" - -import _xxsubinterpreters as _interpreters - -# aliases: -from _xxsubinterpreters import ( - ChannelError, ChannelNotFoundError, ChannelEmptyError, - is_shareable, -) - - -__all__ = [ - 'Interpreter', 'get_current', 'get_main', 'create', 'list_all', - 'SendChannel', 'RecvChannel', - 'create_channel', 'list_all_channels', 'is_shareable', - 'ChannelError', 'ChannelNotFoundError', - 'ChannelEmptyError', - ] - - -def create(*, isolated=True): - """ - Initialize a new (idle) Python interpreter. - """ - id = _interpreters.create(isolated=isolated) - return Interpreter(id, isolated=isolated) - - -def list_all(): - """ - Get all existing interpreters. - """ - return [Interpreter(id) for id in - _interpreters.list_all()] - - -def get_current(): - """ - Get the currently running interpreter. - """ - id = _interpreters.get_current() - return Interpreter(id) - - -def get_main(): - """ - Get the main interpreter. - """ - id = _interpreters.get_main() - return Interpreter(id) - - -class Interpreter: - """ - The Interpreter object represents - a single interpreter. - """ - - def __init__(self, id, *, isolated=None): - self._id = id - self._isolated = isolated - - @property - def id(self): - return self._id - - @property - def isolated(self): - if self._isolated is None: - self._isolated = _interpreters.is_isolated(self._id) - return self._isolated - - def is_running(self): - """ - Return whether or not the identified - interpreter is running. 
- """ - return _interpreters.is_running(self._id) - - def close(self): - """ - Finalize and destroy the interpreter. - - Attempting to destroy the current - interpreter results in a RuntimeError. - """ - return _interpreters.destroy(self._id) - - def run(self, src_str, /, *, channels=None): - """ - Run the given source code in the interpreter. - This blocks the current Python thread until done. - """ - _interpreters.run_string(self._id, src_str) - - -def create_channel(): - """ - Create a new channel for passing data between - interpreters. - """ - - cid = _interpreters.channel_create() - return (RecvChannel(cid), SendChannel(cid)) - - -def list_all_channels(): - """ - Get all open channels. - """ - return [(RecvChannel(cid), SendChannel(cid)) - for cid in _interpreters.channel_list_all()] - - -_NOT_SET = object() - - -class RecvChannel: - """ - The RecvChannel object represents - a recieving channel. - """ - - def __init__(self, id): - self._id = id - - def recv(self, *, _delay=10 / 1000): # 10 milliseconds - """ - Get the next object from the channel, - and wait if none have been sent. - Associate the interpreter with the channel. - """ - import time - sentinel = object() - obj = _interpreters.channel_recv(self._id, sentinel) - while obj is sentinel: - time.sleep(_delay) - obj = _interpreters.channel_recv(self._id, sentinel) - return obj - - def recv_nowait(self, default=_NOT_SET): - """ - Like recv(), but return the default - instead of waiting. - - This function is blocked by a missing low-level - implementation of channel_recv_wait(). - """ - if default is _NOT_SET: - return _interpreters.channel_recv(self._id) - else: - return _interpreters.channel_recv(self._id, default) - - -class SendChannel: - """ - The SendChannel object represents - a sending channel. - """ - - def __init__(self, id): - self._id = id - - def send(self, obj): - """ - Send the object (i.e. its data) to the receiving - end of the channel and wait. 
Associate the interpreter - with the channel. - """ - import time - _interpreters.channel_send(self._id, obj) - time.sleep(2) - - def send_nowait(self, obj): - """ - Like send(), but return False if not received. - - This function is blocked by a missing low-level - implementation of channel_send_wait(). - """ - - _interpreters.channel_send(self._id, obj) - return False diff --git a/Lib/test/support/interpreters.rst b/Lib/test/support/interpreters.rst deleted file mode 100644 index 37a60b1072a66..0000000000000 --- a/Lib/test/support/interpreters.rst +++ /dev/null @@ -1,145 +0,0 @@ -High-level implementation of Subinterpreters -============================================ - -**Source code:** :source:`Lib/test/support/_interpreters.py` - --------------- - -This module provides high-level tools for working with sub-interpreters, -such as creating them, running code in them, or sending data between them. -It is a wrapper around the low-level ``__xxsubinterpreters`` module. - -.. versionchanged:: added in 3.9 - -Interpreter Objects -------------------- - -The ``Interpreter`` object represents a single interpreter. - -.. class:: Interpreter(id) - - The class implementing a subinterpreter object. - - .. method:: is_running() - - Return ``True`` if the identified interpreter is running. - - .. method:: close() - - Destroy the interpreter. Attempting to destroy the current - interpreter results in a `RuntimeError`. - - .. method:: run(self, src_str, /, *, channels=None): - - Run the given source code in the interpreter. This blocks - the current thread until done. ``channels`` should be in - the form : `(RecvChannel, SendChannel)`. - -RecvChannel Objects -------------------- - -The ``RecvChannel`` object represents a recieving channel. - -.. class:: RecvChannel(id) - - This class represents the receiving end of a channel. - - .. method:: recv() - - Get the next object from the channel, and wait if - none have been sent. Associate the interpreter - with the channel. - - .. 
method:: recv_nowait(default=None) - - Like ``recv()``, but return the default result - instead of waiting. - - -SendChannel Objects --------------------- - -The ``SendChannel`` object represents a sending channel. - -.. class:: SendChannel(id) - - This class represents the sending end of a channel. - - .. method:: send(obj) - - Send the object ``obj`` to the receiving end of the channel - and wait. Associate the interpreter with the channel. - - .. method:: send_nowait(obj) - - Similar to ``send()``, but returns ``False`` if - *obj* is not immediately received instead of blocking. - - -This module defines the following global functions: - - -.. function:: is_shareable(obj) - - Return ``True`` if the object's data can be shared between - interpreters. - -.. function:: create_channel() - - Create a new channel for passing data between interpreters. - -.. function:: list_all_channels() - - Return all open channels. - -.. function:: create(*, isolated=True) - - Initialize a new (idle) Python interpreter. Get the currently - running interpreter. This method returns an ``Interpreter`` object. - -.. function:: get_current() - - Get the currently running interpreter. This method returns - an ``Interpreter`` object. - -.. function:: get_main() - - Get the main interpreter. This method returns - an ``Interpreter`` object. - -.. function:: list_all() - - Get all existing interpreters. Returns a list - of ``Interpreter`` objects. - -This module also defines the following exceptions. - -.. exception:: RunFailedError - - This exception, a subclass of :exc:`RuntimeError`, is raised when the - ``Interpreter.run()`` results in an uncaught exception. - -.. exception:: ChannelError - - This exception is a subclass of :exc:`Exception`, and is the base - class for all channel-related exceptions. - -.. exception:: ChannelNotFoundError - - This exception is a subclass of :exc:`ChannelError`, and is raised - when the identified channel is not found. - -.. 
exception:: ChannelEmptyError - - This exception is a subclass of :exc:`ChannelError`, and is raised when - the channel is unexpectedly empty. - -.. exception:: ChannelNotEmptyError - - This exception is a subclass of :exc:`ChannelError`, and is raised when - the channel is unexpectedly not empty. - -.. exception:: NotReceivedError - - This exception is a subclass of :exc:`ChannelError`, and is raised when - nothing was waiting to receive a sent object. diff --git a/Lib/test/test_interpreters.py b/Lib/test/test_interpreters.py deleted file mode 100644 index 3451a4c8759d8..0000000000000 --- a/Lib/test/test_interpreters.py +++ /dev/null @@ -1,535 +0,0 @@ -import contextlib -import os -import threading -from textwrap import dedent -import unittest -import time - -import _xxsubinterpreters as _interpreters -from test.support import interpreters - - -def _captured_script(script): - r, w = os.pipe() - indented = script.replace('\n', '\n ') - wrapped = dedent(f""" - import contextlib - with open({w}, 'w') as spipe: - with contextlib.redirect_stdout(spipe): - {indented} - """) - return wrapped, open(r) - - -def clean_up_interpreters(): - for interp in interpreters.list_all(): - if interp.id == 0: # main - continue - try: - interp.close() - except RuntimeError: - pass # already destroyed - - -def _run_output(interp, request, shared=None): - script, rpipe = _captured_script(request) - with rpipe: - interp.run(script) - return rpipe.read() - - - at contextlib.contextmanager -def _running(interp): - r, w = os.pipe() - def run(): - interp.run(dedent(f""" - # wait for "signal" - with open({r}) as rpipe: - rpipe.read() - """)) - - t = threading.Thread(target=run) - t.start() - - yield - - with open(w, 'w') as spipe: - spipe.write('done') - t.join() - - -class TestBase(unittest.TestCase): - - def tearDown(self): - clean_up_interpreters() - - -class CreateTests(TestBase): - - def test_in_main(self): - interp = interpreters.create() - lst = interpreters.list_all() - 
self.assertEqual(interp.id, lst[1].id) - - def test_in_thread(self): - lock = threading.Lock() - id = None - interp = interpreters.create() - lst = interpreters.list_all() - def f(): - nonlocal id - id = interp.id - lock.acquire() - lock.release() - - t = threading.Thread(target=f) - with lock: - t.start() - t.join() - self.assertEqual(interp.id, lst[1].id) - - def test_in_subinterpreter(self): - main, = interpreters.list_all() - interp = interpreters.create() - out = _run_output(interp, dedent(""" - from test.support import interpreters - interp = interpreters.create() - print(interp) - """)) - interp2 = out.strip() - - self.assertEqual(len(set(interpreters.list_all())), len({main, interp, interp2})) - - def test_after_destroy_all(self): - before = set(interpreters.list_all()) - # Create 3 subinterpreters. - interp_lst = [] - for _ in range(3): - interps = interpreters.create() - interp_lst.append(interps) - # Now destroy them. - for interp in interp_lst: - interp.close() - # Finally, create another. - interp = interpreters.create() - self.assertEqual(len(set(interpreters.list_all())), len(before | {interp})) - - def test_after_destroy_some(self): - before = set(interpreters.list_all()) - # Create 3 subinterpreters. - interp1 = interpreters.create() - interp2 = interpreters.create() - interp3 = interpreters.create() - # Now destroy 2 of them. - interp1.close() - interp2.close() - # Finally, create another. 
- interp = interpreters.create() - self.assertEqual(len(set(interpreters.list_all())), len(before | {interp3, interp})) - - -class GetCurrentTests(TestBase): - - def test_main(self): - main_interp_id = _interpreters.get_main() - cur_interp_id = interpreters.get_current().id - self.assertEqual(cur_interp_id, main_interp_id) - - def test_subinterpreter(self): - main = _interpreters.get_main() - interp = interpreters.create() - out = _run_output(interp, dedent(""" - from test.support import interpreters - cur = interpreters.get_current() - print(cur) - """)) - cur = out.strip() - self.assertNotEqual(cur, main) - - -class ListAllTests(TestBase): - - def test_initial(self): - interps = interpreters.list_all() - self.assertEqual(1, len(interps)) - - def test_after_creating(self): - main = interpreters.get_current() - first = interpreters.create() - second = interpreters.create() - - ids = [] - for interp in interpreters.list_all(): - ids.append(interp.id) - - self.assertEqual(ids, [main.id, first.id, second.id]) - - def test_after_destroying(self): - main = interpreters.get_current() - first = interpreters.create() - second = interpreters.create() - first.close() - - ids = [] - for interp in interpreters.list_all(): - ids.append(interp.id) - - self.assertEqual(ids, [main.id, second.id]) - - -class TestInterpreterId(TestBase): - - def test_in_main(self): - main = interpreters.get_current() - self.assertEqual(0, main.id) - - def test_with_custom_num(self): - interp = interpreters.Interpreter(1) - self.assertEqual(1, interp.id) - - def test_for_readonly_property(self): - interp = interpreters.Interpreter(1) - with self.assertRaises(AttributeError): - interp.id = 2 - - -class TestInterpreterIsRunning(TestBase): - - def test_main(self): - main = interpreters.get_current() - self.assertTrue(main.is_running()) - - def test_subinterpreter(self): - interp = interpreters.create() - self.assertFalse(interp.is_running()) - - with _running(interp): - 
self.assertTrue(interp.is_running()) - self.assertFalse(interp.is_running()) - - def test_from_subinterpreter(self): - interp = interpreters.create() - out = _run_output(interp, dedent(f""" - import _xxsubinterpreters as _interpreters - if _interpreters.is_running({interp.id}): - print(True) - else: - print(False) - """)) - self.assertEqual(out.strip(), 'True') - - def test_already_destroyed(self): - interp = interpreters.create() - interp.close() - with self.assertRaises(RuntimeError): - interp.is_running() - - -class TestInterpreterDestroy(TestBase): - - def test_basic(self): - interp1 = interpreters.create() - interp2 = interpreters.create() - interp3 = interpreters.create() - self.assertEqual(4, len(interpreters.list_all())) - interp2.close() - self.assertEqual(3, len(interpreters.list_all())) - - def test_all(self): - before = set(interpreters.list_all()) - interps = set() - for _ in range(3): - interp = interpreters.create() - interps.add(interp) - self.assertEqual(len(set(interpreters.list_all())), len(before | interps)) - for interp in interps: - interp.close() - self.assertEqual(len(set(interpreters.list_all())), len(before)) - - def test_main(self): - main, = interpreters.list_all() - with self.assertRaises(RuntimeError): - main.close() - - def f(): - with self.assertRaises(RuntimeError): - main.close() - - t = threading.Thread(target=f) - t.start() - t.join() - - def test_already_destroyed(self): - interp = interpreters.create() - interp.close() - with self.assertRaises(RuntimeError): - interp.close() - - def test_from_current(self): - main, = interpreters.list_all() - interp = interpreters.create() - script = dedent(f""" - from test.support import interpreters - try: - main = interpreters.get_current() - main.close() - except RuntimeError: - pass - """) - - interp.run(script) - self.assertEqual(len(set(interpreters.list_all())), len({main, interp})) - - def test_from_sibling(self): - main, = interpreters.list_all() - interp1 = interpreters.create() - 
script = dedent(f""" - from test.support import interpreters - interp2 = interpreters.create() - interp2.close() - """) - interp1.run(script) - - self.assertEqual(len(set(interpreters.list_all())), len({main, interp1})) - - def test_from_other_thread(self): - interp = interpreters.create() - def f(): - interp.close() - - t = threading.Thread(target=f) - t.start() - t.join() - - def test_still_running(self): - main, = interpreters.list_all() - interp = interpreters.create() - with _running(interp): - with self.assertRaises(RuntimeError): - interp.close() - self.assertTrue(interp.is_running()) - - -class TestInterpreterRun(TestBase): - - SCRIPT = dedent(""" - with open('{}', 'w') as out: - out.write('{}') - """) - FILENAME = 'spam' - - def setUp(self): - super().setUp() - self.interp = interpreters.create() - self._fs = None - - def tearDown(self): - if self._fs is not None: - self._fs.close() - super().tearDown() - - @property - def fs(self): - if self._fs is None: - self._fs = FSFixture(self) - return self._fs - - def test_success(self): - script, file = _captured_script('print("it worked!", end="")') - with file: - self.interp.run(script) - out = file.read() - - self.assertEqual(out, 'it worked!') - - def test_in_thread(self): - script, file = _captured_script('print("it worked!", end="")') - with file: - def f(): - self.interp.run(script) - - t = threading.Thread(target=f) - t.start() - t.join() - out = file.read() - - self.assertEqual(out, 'it worked!') - - @unittest.skipUnless(hasattr(os, 'fork'), "test needs os.fork()") - def test_fork(self): - import tempfile - with tempfile.NamedTemporaryFile('w+') as file: - file.write('') - file.flush() - - expected = 'spam spam spam spam spam' - script = dedent(f""" - import os - try: - os.fork() - except RuntimeError: - with open('{file.name}', 'w') as out: - out.write('{expected}') - """) - self.interp.run(script) - - file.seek(0) - content = file.read() - self.assertEqual(content, expected) - - def 
test_already_running(self): - with _running(self.interp): - with self.assertRaises(RuntimeError): - self.interp.run('print("spam")') - - def test_bad_script(self): - with self.assertRaises(TypeError): - self.interp.run(10) - - def test_bytes_for_script(self): - with self.assertRaises(TypeError): - self.interp.run(b'print("spam")') - - -class TestIsShareable(TestBase): - - def test_default_shareables(self): - shareables = [ - # singletons - None, - # builtin objects - b'spam', - 'spam', - 10, - -10, - ] - for obj in shareables: - with self.subTest(obj): - self.assertTrue( - interpreters.is_shareable(obj)) - - def test_not_shareable(self): - class Cheese: - def __init__(self, name): - self.name = name - def __str__(self): - return self.name - - class SubBytes(bytes): - """A subclass of a shareable type.""" - - not_shareables = [ - # singletons - True, - False, - NotImplemented, - ..., - # builtin types and objects - type, - object, - object(), - Exception(), - 100.0, - # user-defined types and objects - Cheese, - Cheese('Wensleydale'), - SubBytes(b'spam'), - ] - for obj in not_shareables: - with self.subTest(repr(obj)): - self.assertFalse( - interpreters.is_shareable(obj)) - - -class TestChannel(TestBase): - - def test_create_cid(self): - r, s = interpreters.create_channel() - self.assertIsInstance(r, interpreters.RecvChannel) - self.assertIsInstance(s, interpreters.SendChannel) - - def test_sequential_ids(self): - before = interpreters.list_all_channels() - channels1 = interpreters.create_channel() - channels2 = interpreters.create_channel() - channels3 = interpreters.create_channel() - after = interpreters.list_all_channels() - - self.assertEqual(len(set(after) - set(before)), - len({channels1, channels2, channels3})) - - -class TestSendRecv(TestBase): - - def test_send_recv_main(self): - r, s = interpreters.create_channel() - orig = b'spam' - s.send(orig) - obj = r.recv() - - self.assertEqual(obj, orig) - self.assertIsNot(obj, orig) - - def 
test_send_recv_same_interpreter(self): - interp = interpreters.create() - out = _run_output(interp, dedent(""" - from test.support import interpreters - r, s = interpreters.create_channel() - orig = b'spam' - s.send(orig) - obj = r.recv() - assert obj is not orig - assert obj == orig - """)) - - def test_send_recv_different_threads(self): - r, s = interpreters.create_channel() - - def f(): - while True: - try: - obj = r.recv() - break - except interpreters.ChannelEmptyError: - time.sleep(0.1) - s.send(obj) - t = threading.Thread(target=f) - t.start() - - s.send(b'spam') - t.join() - obj = r.recv() - - self.assertEqual(obj, b'spam') - - def test_send_recv_nowait_main(self): - r, s = interpreters.create_channel() - orig = b'spam' - s.send(orig) - obj = r.recv_nowait() - - self.assertEqual(obj, orig) - self.assertIsNot(obj, orig) - - def test_send_recv_nowait_same_interpreter(self): - interp = interpreters.create() - out = _run_output(interp, dedent(""" - from test.support import interpreters - r, s = interpreters.create_channel() - orig = b'spam' - s.send(orig) - obj = r.recv_nowait() - assert obj is not orig - assert obj == orig - """)) - - r, s = interpreters.create_channel() - - def f(): - while True: - try: - obj = r.recv_nowait() - break - except _interpreters.ChannelEmptyError: - time.sleep(0.1) - s.send(obj) diff --git a/Misc/NEWS.d/next/Library/2020-05-07-22-00-12.bpo-39881.E1xsNv.rst b/Misc/NEWS.d/next/Library/2020-05-07-22-00-12.bpo-39881.E1xsNv.rst deleted file mode 100644 index 1129cd7649b96..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-07-22-00-12.bpo-39881.E1xsNv.rst +++ /dev/null @@ -1,2 +0,0 @@ -PEP 554 for use in the test suite. 
-(Patch By Joannah Nanjekye) \ No newline at end of file From webhook-mailer at python.org Wed May 27 18:10:35 2020 From: webhook-mailer at python.org (Hai Shi) Date: Wed, 27 May 2020 22:10:35 -0000 Subject: [Python-checkins] bpo-40275: Adding threading_helper submodule in test.support (GH-20263) Message-ID: https://github.com/python/cpython/commit/e80697d687b610bd7fb9104af905dec8f0bc55a7 commit: e80697d687b610bd7fb9104af905dec8f0bc55a7 branch: master author: Hai Shi committer: GitHub date: 2020-05-28T00:10:27+02:00 summary: bpo-40275: Adding threading_helper submodule in test.support (GH-20263) files: A Lib/test/support/threading_helper.py M Doc/library/test.rst M Lib/test/_test_multiprocessing.py M Lib/test/fork_wait.py M Lib/test/lock_tests.py M Lib/test/pickletester.py M Lib/test/support/__init__.py M Lib/test/test_asynchat.py M Lib/test/test_asyncio/test_events.py M Lib/test/test_asyncio/utils.py M Lib/test/test_asyncore.py M Lib/test/test_bz2.py M Lib/test/test_capi.py M Lib/test/test_concurrent_futures.py M Lib/test/test_email/test_email.py M Lib/test/test_enum.py M Lib/test/test_ftplib.py M Lib/test/test_functools.py M Lib/test/test_gc.py M Lib/test/test_hashlib.py M Lib/test/test_httpservers.py M Lib/test/test_imaplib.py M Lib/test/test_import/__init__.py M Lib/test/test_importlib/test_locks.py M Lib/test/test_importlib/test_threaded_import.py M Lib/test/test_io.py M Lib/test/test_logging.py M Lib/test/test_os.py M Lib/test/test_poll.py M Lib/test/test_poplib.py M Lib/test/test_pydoc.py M Lib/test/test_queue.py M Lib/test/test_robotparser.py M Lib/test/test_sched.py M Lib/test/test_smtplib.py M Lib/test/test_socket.py M Lib/test/test_socketserver.py M Lib/test/test_ssl.py M Lib/test/test_sys.py M Lib/test/test_thread.py M Lib/test/test_threadedtempfile.py M Lib/test/test_threading.py M Lib/test/test_threading_local.py M Lib/test/test_threadsignals.py M Lib/test/test_urllib2_localnet.py M Lib/test/test_xmlrpc.py diff --git a/Doc/library/test.rst 
b/Doc/library/test.rst index f7e6eba018161..7bee6e8031a05 100644 --- a/Doc/library/test.rst +++ b/Doc/library/test.rst @@ -838,18 +838,6 @@ The :mod:`test.support` module defines the following functions: .. versionadded:: 3.9 -.. function:: wait_threads_exit(timeout=60.0) - - Context manager to wait until all threads created in the ``with`` statement - exit. - - -.. function:: start_threads(threads, unlock=None) - - Context manager to start *threads*. It attempts to join the threads upon - exit. - - .. function:: calcobjsize(fmt) Return :func:`struct.calcsize` for ``nP{fmt}0n`` or, if ``gettotalrefcount`` @@ -988,11 +976,6 @@ The :mod:`test.support` module defines the following functions: the trace function. -.. decorator:: reap_threads(func) - - Decorator to ensure the threads are cleaned up even if the test fails. - - .. decorator:: bigmemtest(size, memuse, dry_run=True) Decorator for bigmem tests. @@ -1110,23 +1093,6 @@ The :mod:`test.support` module defines the following functions: preserve internal cache. -.. function:: threading_setup() - - Return current thread count and copy of dangling threads. - - -.. function:: threading_cleanup(*original_values) - - Cleanup up threads not specified in *original_values*. Designed to emit - a warning if a test leaves running threads in the background. - - -.. function:: join_thread(thread, timeout=30.0) - - Join a *thread* within *timeout*. Raise an :exc:`AssertionError` if thread - is still alive after *timeout* seconds. - - .. function:: reap_children() Use this at the end of ``test_main`` whenever sub-processes are started. @@ -1140,39 +1106,6 @@ The :mod:`test.support` module defines the following functions: is raised. -.. function:: catch_threading_exception() - - Context manager catching :class:`threading.Thread` exception using - :func:`threading.excepthook`. 
- - Attributes set when an exception is catched: - - * ``exc_type`` - * ``exc_value`` - * ``exc_traceback`` - * ``thread`` - - See :func:`threading.excepthook` documentation. - - These attributes are deleted at the context manager exit. - - Usage:: - - with support.catch_threading_exception() as cm: - # code spawning a thread which raises an exception - ... - - # check the thread exception, use cm attributes: - # exc_type, exc_value, exc_traceback, thread - ... - - # exc_type, exc_value, exc_traceback, thread attributes of cm no longer - # exists at this point - # (to avoid reference cycles) - - .. versionadded:: 3.8 - - .. function:: catch_unraisable_exception() Context manager catching unraisable exception using @@ -1628,3 +1561,81 @@ The module defines the following class: .. method:: BytecodeTestCase.assertNotInBytecode(x, opname, argval=_UNSPECIFIED) Throws :exc:`AssertionError` if *opname* is found. + + +:mod:`test.support.threading_helper` --- Utilities for threading tests +====================================================================== + +.. module:: test.support.threading_helper + :synopsis: Support for threading tests. + +The :mod:`test.support.threading_helper` module provides support for threading tests. + +.. versionadded:: 3.10 + + +.. function:: join_thread(thread, timeout=None) + + Join a *thread* within *timeout*. Raise an :exc:`AssertionError` if thread + is still alive after *timeout* seconds. + + +.. decorator:: reap_threads(func) + + Decorator to ensure the threads are cleaned up even if the test fails. + + +.. function:: start_threads(threads, unlock=None) + + Context manager to start *threads*. It attempts to join the threads upon + exit. + + +.. function:: threading_cleanup(*original_values) + + Cleanup up threads not specified in *original_values*. Designed to emit + a warning if a test leaves running threads in the background. + + +.. function:: threading_setup() + + Return current thread count and copy of dangling threads. + + +.. 
function:: wait_threads_exit(timeout=None) + + Context manager to wait until all threads created in the ``with`` statement + exit. + + +.. function:: catch_threading_exception() + + Context manager catching :class:`threading.Thread` exception using + :func:`threading.excepthook`. + + Attributes set when an exception is catched: + + * ``exc_type`` + * ``exc_value`` + * ``exc_traceback`` + * ``thread`` + + See :func:`threading.excepthook` documentation. + + These attributes are deleted at the context manager exit. + + Usage:: + + with threading_helper.catch_threading_exception() as cm: + # code spawning a thread which raises an exception + ... + + # check the thread exception, use cm attributes: + # exc_type, exc_value, exc_traceback, thread + ... + + # exc_type, exc_value, exc_traceback, thread attributes of cm no longer + # exists at this point + # (to avoid reference cycles) + + .. versionadded:: 3.8 diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py index 155a8276e7507..bbba2b45e5f03 100644 --- a/Lib/test/_test_multiprocessing.py +++ b/Lib/test/_test_multiprocessing.py @@ -27,6 +27,7 @@ import test.support.script_helper from test import support from test.support import socket_helper +from test.support import threading_helper # Skip tests if _multiprocessing wasn't built. 
@@ -81,7 +82,7 @@ def close_queue(queue): def join_process(process): # Since multiprocessing.Process has the same API than threading.Thread # (join() and is_alive(), the support function can be reused - support.join_thread(process) + threading_helper.join_thread(process) if os.name == "posix": @@ -4234,7 +4235,7 @@ def make_finalizers(): gc.set_threshold(5, 5, 5) threads = [threading.Thread(target=run_finalizers), threading.Thread(target=make_finalizers)] - with test.support.start_threads(threads): + with threading_helper.start_threads(threads): time.sleep(4.0) # Wait a bit to trigger race condition finish = True if exc is not None: diff --git a/Lib/test/fork_wait.py b/Lib/test/fork_wait.py index 249b5e9607329..4d3dbd8e83f5a 100644 --- a/Lib/test/fork_wait.py +++ b/Lib/test/fork_wait.py @@ -12,6 +12,7 @@ import os, sys, time, unittest import threading from test import support +from test.support import threading_helper LONGSLEEP = 2 @@ -21,7 +22,7 @@ class ForkWait(unittest.TestCase): def setUp(self): - self._threading_key = support.threading_setup() + self._threading_key = threading_helper.threading_setup() self.alive = {} self.stop = 0 self.threads = [] @@ -33,7 +34,7 @@ def tearDown(self): thread.join() thread = None self.threads.clear() - support.threading_cleanup(*self._threading_key) + threading_helper.threading_cleanup(*self._threading_key) def f(self, id): while not self.stop: diff --git a/Lib/test/lock_tests.py b/Lib/test/lock_tests.py index b3975254c79b5..d69bcc9496843 100644 --- a/Lib/test/lock_tests.py +++ b/Lib/test/lock_tests.py @@ -11,6 +11,7 @@ import weakref from test import support +from test.support import threading_helper requires_fork = unittest.skipUnless(hasattr(os, 'fork'), @@ -37,7 +38,7 @@ def __init__(self, f, n, wait_before_exit=False): self.started = [] self.finished = [] self._can_exit = not wait_before_exit - self.wait_thread = support.wait_threads_exit() + self.wait_thread = threading_helper.wait_threads_exit() 
self.wait_thread.__enter__() def task(): @@ -73,10 +74,10 @@ def do_finish(self): class BaseTestCase(unittest.TestCase): def setUp(self): - self._threads = support.threading_setup() + self._threads = threading_helper.threading_setup() def tearDown(self): - support.threading_cleanup(*self._threads) + threading_helper.threading_cleanup(*self._threads) support.reap_children() def assertTimeout(self, actual, expected): @@ -239,7 +240,7 @@ def f(): lock.acquire() phase.append(None) - with support.wait_threads_exit(): + with threading_helper.wait_threads_exit(): start_new_thread(f, ()) while len(phase) == 0: _wait() diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py index 6ef4c8989f55b..ca566a28d60a9 100644 --- a/Lib/test/pickletester.py +++ b/Lib/test/pickletester.py @@ -29,8 +29,9 @@ from test import support from test.support import ( TestFailed, TESTFN, run_with_locale, no_tracing, - _2G, _4G, bigmemtest, reap_threads, forget, + _2G, _4G, bigmemtest, forget, ) +from test.support import threading_helper from pickle import bytes_types @@ -1350,7 +1351,7 @@ def test_truncated_data(self): for p in badpickles: self.check_unpickling_error(self.truncated_errors, p) - @reap_threads + @threading_helper.reap_threads def test_unpickle_module_race(self): # https://bugs.python.org/issue34572 locker_module = dedent(""" diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index 8dee5b9dcc7ab..e894545f87e42 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -19,8 +19,6 @@ import subprocess import sys import sysconfig -import _thread -import threading import time import types import unittest @@ -62,8 +60,6 @@ "open_urlresource", # processes 'temp_umask', "reap_children", - # threads - "threading_setup", "threading_cleanup", "reap_threads", "start_threads", # miscellaneous "check_warnings", "check_no_resource_warning", "check_no_warnings", "EnvironmentVarGuard", @@ -1991,120 +1987,14 @@ def modules_cleanup(oldmodules): # 
Implicitly imported *real* modules should be left alone (see issue 10556). sys.modules.update(oldmodules) -#======================================================================= -# Threading support to prevent reporting refleaks when running regrtest.py -R - # Flag used by saved_test_environment of test.libregrtest.save_env, # to check if a test modified the environment. The flag should be set to False # before running a new test. # -# For example, threading_cleanup() sets the flag is the function fails +# For example, threading_helper.threading_cleanup() sets the flag is the function fails # to cleanup threads. environment_altered = False -# NOTE: we use thread._count() rather than threading.enumerate() (or the -# moral equivalent thereof) because a threading.Thread object is still alive -# until its __bootstrap() method has returned, even after it has been -# unregistered from the threading module. -# thread._count(), on the other hand, only gets decremented *after* the -# __bootstrap() method has returned, which gives us reliable reference counts -# at the end of a test run. - -def threading_setup(): - return _thread._count(), threading._dangling.copy() - -def threading_cleanup(*original_values): - global environment_altered - - _MAX_COUNT = 100 - - for count in range(_MAX_COUNT): - values = _thread._count(), threading._dangling - if values == original_values: - break - - if not count: - # Display a warning at the first iteration - environment_altered = True - dangling_threads = values[1] - print_warning(f"threading_cleanup() failed to cleanup " - f"{values[0] - original_values[0]} threads " - f"(count: {values[0]}, " - f"dangling: {len(dangling_threads)})") - for thread in dangling_threads: - print_warning(f"Dangling thread: {thread!r}") - - # Don't hold references to threads - dangling_threads = None - values = None - - time.sleep(0.01) - gc_collect() - - -def reap_threads(func): - """Use this function when threads are being used. 
This will - ensure that the threads are cleaned up even when the test fails. - """ - @functools.wraps(func) - def decorator(*args): - key = threading_setup() - try: - return func(*args) - finally: - threading_cleanup(*key) - return decorator - - - at contextlib.contextmanager -def wait_threads_exit(timeout=None): - """ - bpo-31234: Context manager to wait until all threads created in the with - statement exit. - - Use _thread.count() to check if threads exited. Indirectly, wait until - threads exit the internal t_bootstrap() C function of the _thread module. - - threading_setup() and threading_cleanup() are designed to emit a warning - if a test leaves running threads in the background. This context manager - is designed to cleanup threads started by the _thread.start_new_thread() - which doesn't allow to wait for thread exit, whereas thread.Thread has a - join() method. - """ - if timeout is None: - timeout = SHORT_TIMEOUT - old_count = _thread._count() - try: - yield - finally: - start_time = time.monotonic() - deadline = start_time + timeout - while True: - count = _thread._count() - if count <= old_count: - break - if time.monotonic() > deadline: - dt = time.monotonic() - start_time - msg = (f"wait_threads() failed to cleanup {count - old_count} " - f"threads after {dt:.1f} seconds " - f"(count: {count}, old count: {old_count})") - raise AssertionError(msg) - time.sleep(0.010) - gc_collect() - - -def join_thread(thread, timeout=None): - """Join a thread. Raise an AssertionError if the thread is still alive - after timeout seconds. - """ - if timeout is None: - timeout = SHORT_TIMEOUT - thread.join(timeout) - if thread.is_alive(): - msg = f"failed to join the thread in {timeout:.1f} seconds" - raise AssertionError(msg) - - def reap_children(): """Use this function at the end of test_main() whenever sub-processes are started. 
This will help ensure that no extra children (zombies) @@ -2133,43 +2023,6 @@ def reap_children(): environment_altered = True - at contextlib.contextmanager -def start_threads(threads, unlock=None): - import faulthandler - threads = list(threads) - started = [] - try: - try: - for t in threads: - t.start() - started.append(t) - except: - if verbose: - print("Can't start %d threads, only %d threads started" % - (len(threads), len(started))) - raise - yield - finally: - try: - if unlock: - unlock() - endtime = starttime = time.monotonic() - for timeout in range(1, 16): - endtime += 60 - for t in started: - t.join(max(endtime - time.monotonic(), 0.01)) - started = [t for t in started if t.is_alive()] - if not started: - break - if verbose: - print('Unable to join %d threads during a period of ' - '%d minutes' % (len(started), timeout)) - finally: - started = [t for t in started if t.is_alive()] - if started: - faulthandler.dump_traceback(sys.stdout) - raise AssertionError('Unable to join %d threads' % len(started)) - @contextlib.contextmanager def swap_attr(obj, attr, new_val): """Temporary swap out an attribute with a new object. @@ -3023,63 +2876,6 @@ def __exit__(self, *exc_info): del self.unraisable -class catch_threading_exception: - """ - Context manager catching threading.Thread exception using - threading.excepthook. - - Attributes set when an exception is catched: - - * exc_type - * exc_value - * exc_traceback - * thread - - See threading.excepthook() documentation for these attributes. - - These attributes are deleted at the context manager exit. - - Usage: - - with support.catch_threading_exception() as cm: - # code spawning a thread which raises an exception - ... - - # check the thread exception, use cm attributes: - # exc_type, exc_value, exc_traceback, thread - ... 
- - # exc_type, exc_value, exc_traceback, thread attributes of cm no longer - # exists at this point - # (to avoid reference cycles) - """ - - def __init__(self): - self.exc_type = None - self.exc_value = None - self.exc_traceback = None - self.thread = None - self._old_hook = None - - def _hook(self, args): - self.exc_type = args.exc_type - self.exc_value = args.exc_value - self.exc_traceback = args.exc_traceback - self.thread = args.thread - - def __enter__(self): - self._old_hook = threading.excepthook - threading.excepthook = self._hook - return self - - def __exit__(self, *exc_info): - threading.excepthook = self._old_hook - del self.exc_type - del self.exc_value - del self.exc_traceback - del self.thread - - def wait_process(pid, *, exitcode, timeout=None): """ Wait until process pid completes and check that the process exit code is diff --git a/Lib/test/support/threading_helper.py b/Lib/test/support/threading_helper.py new file mode 100644 index 0000000000000..96f7b3fcebfac --- /dev/null +++ b/Lib/test/support/threading_helper.py @@ -0,0 +1,208 @@ +import contextlib +import functools +import _thread +import threading +import time + +from test import support + + +#======================================================================= +# Threading support to prevent reporting refleaks when running regrtest.py -R + +# NOTE: we use thread._count() rather than threading.enumerate() (or the +# moral equivalent thereof) because a threading.Thread object is still alive +# until its __bootstrap() method has returned, even after it has been +# unregistered from the threading module. +# thread._count(), on the other hand, only gets decremented *after* the +# __bootstrap() method has returned, which gives us reliable reference counts +# at the end of a test run. 
+ + +def threading_setup(): + return _thread._count(), threading._dangling.copy() + + +def threading_cleanup(*original_values): + _MAX_COUNT = 100 + + for count in range(_MAX_COUNT): + values = _thread._count(), threading._dangling + if values == original_values: + break + + if not count: + # Display a warning at the first iteration + support.environment_altered = True + dangling_threads = values[1] + support.print_warning(f"threading_cleanup() failed to cleanup " + f"{values[0] - original_values[0]} threads " + f"(count: {values[0]}, " + f"dangling: {len(dangling_threads)})") + for thread in dangling_threads: + support.print_warning(f"Dangling thread: {thread!r}") + + # Don't hold references to threads + dangling_threads = None + values = None + + time.sleep(0.01) + gc_collect() + + +def reap_threads(func): + """Use this function when threads are being used. This will + ensure that the threads are cleaned up even when the test fails. + """ + @functools.wraps(func) + def decorator(*args): + key = threading_setup() + try: + return func(*args) + finally: + threading_cleanup(*key) + return decorator + + + at contextlib.contextmanager +def wait_threads_exit(timeout=None): + """ + bpo-31234: Context manager to wait until all threads created in the with + statement exit. + + Use _thread.count() to check if threads exited. Indirectly, wait until + threads exit the internal t_bootstrap() C function of the _thread module. + + threading_setup() and threading_cleanup() are designed to emit a warning + if a test leaves running threads in the background. This context manager + is designed to cleanup threads started by the _thread.start_new_thread() + which doesn't allow to wait for thread exit, whereas thread.Thread has a + join() method. 
+ """ + if timeout is None: + timeout = support.SHORT_TIMEOUT + old_count = _thread._count() + try: + yield + finally: + start_time = time.monotonic() + deadline = start_time + timeout + while True: + count = _thread._count() + if count <= old_count: + break + if time.monotonic() > deadline: + dt = time.monotonic() - start_time + msg = (f"wait_threads() failed to cleanup {count - old_count} " + f"threads after {dt:.1f} seconds " + f"(count: {count}, old count: {old_count})") + raise AssertionError(msg) + time.sleep(0.010) + gc_collect() + + +def join_thread(thread, timeout=None): + """Join a thread. Raise an AssertionError if the thread is still alive + after timeout seconds. + """ + if timeout is None: + timeout = support.SHORT_TIMEOUT + thread.join(timeout) + if thread.is_alive(): + msg = f"failed to join the thread in {timeout:.1f} seconds" + raise AssertionError(msg) + + + at contextlib.contextmanager +def start_threads(threads, unlock=None): + import faulthandler + threads = list(threads) + started = [] + try: + try: + for t in threads: + t.start() + started.append(t) + except: + if verbose: + print("Can't start %d threads, only %d threads started" % + (len(threads), len(started))) + raise + yield + finally: + try: + if unlock: + unlock() + endtime = starttime = time.monotonic() + for timeout in range(1, 16): + endtime += 60 + for t in started: + t.join(max(endtime - time.monotonic(), 0.01)) + started = [t for t in started if t.is_alive()] + if not started: + break + if verbose: + print('Unable to join %d threads during a period of ' + '%d minutes' % (len(started), timeout)) + finally: + started = [t for t in started if t.is_alive()] + if started: + faulthandler.dump_traceback(sys.stdout) + raise AssertionError('Unable to join %d threads' % len(started)) + + +class catch_threading_exception: + """ + Context manager catching threading.Thread exception using + threading.excepthook. 
+ + Attributes set when an exception is catched: + + * exc_type + * exc_value + * exc_traceback + * thread + + See threading.excepthook() documentation for these attributes. + + These attributes are deleted at the context manager exit. + + Usage: + + with threading_helper.catch_threading_exception() as cm: + # code spawning a thread which raises an exception + ... + + # check the thread exception, use cm attributes: + # exc_type, exc_value, exc_traceback, thread + ... + + # exc_type, exc_value, exc_traceback, thread attributes of cm no longer + # exists at this point + # (to avoid reference cycles) + """ + + def __init__(self): + self.exc_type = None + self.exc_value = None + self.exc_traceback = None + self.thread = None + self._old_hook = None + + def _hook(self, args): + self.exc_type = args.exc_type + self.exc_value = args.exc_value + self.exc_traceback = args.exc_traceback + self.thread = args.thread + + def __enter__(self): + self._old_hook = threading.excepthook + threading.excepthook = self._hook + return self + + def __exit__(self, *exc_info): + threading.excepthook = self._old_hook + del self.exc_type + del self.exc_value + del self.exc_traceback + del self.thread diff --git a/Lib/test/test_asynchat.py b/Lib/test/test_asynchat.py index 004d368d76312..b32edddc7d550 100644 --- a/Lib/test/test_asynchat.py +++ b/Lib/test/test_asynchat.py @@ -2,6 +2,7 @@ from test import support from test.support import socket_helper +from test.support import threading_helper import asynchat import asyncore @@ -103,10 +104,10 @@ class TestAsynchat(unittest.TestCase): usepoll = False def setUp(self): - self._threads = support.threading_setup() + self._threads = threading_helper.threading_setup() def tearDown(self): - support.threading_cleanup(*self._threads) + threading_helper.threading_cleanup(*self._threads) def line_terminator_check(self, term, server_chunk): event = threading.Event() @@ -122,7 +123,7 @@ def line_terminator_check(self, term, server_chunk): c.push(b"I'm not 
dead yet!" + term) c.push(SERVER_QUIT) asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01) - support.join_thread(s) + threading_helper.join_thread(s) self.assertEqual(c.contents, [b"hello world", b"I'm not dead yet!"]) @@ -153,7 +154,7 @@ def numeric_terminator_check(self, termlen): c.push(data) c.push(SERVER_QUIT) asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01) - support.join_thread(s) + threading_helper.join_thread(s) self.assertEqual(c.contents, [data[:termlen]]) @@ -173,7 +174,7 @@ def test_none_terminator(self): c.push(data) c.push(SERVER_QUIT) asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01) - support.join_thread(s) + threading_helper.join_thread(s) self.assertEqual(c.contents, []) self.assertEqual(c.buffer, data) @@ -185,7 +186,7 @@ def test_simple_producer(self): p = asynchat.simple_producer(data+SERVER_QUIT, buffer_size=8) c.push_with_producer(p) asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01) - support.join_thread(s) + threading_helper.join_thread(s) self.assertEqual(c.contents, [b"hello world", b"I'm not dead yet!"]) @@ -195,7 +196,7 @@ def test_string_producer(self): data = b"hello world\nI'm not dead yet!\n" c.push_with_producer(data+SERVER_QUIT) asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01) - support.join_thread(s) + threading_helper.join_thread(s) self.assertEqual(c.contents, [b"hello world", b"I'm not dead yet!"]) @@ -206,7 +207,7 @@ def test_empty_line(self): c.push(b"hello world\n\nI'm not dead yet!\n") c.push(SERVER_QUIT) asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01) - support.join_thread(s) + threading_helper.join_thread(s) self.assertEqual(c.contents, [b"hello world", b"", b"I'm not dead yet!"]) @@ -225,7 +226,7 @@ def test_close_when_done(self): # where the server echoes all of its data before we can check that it # got any down below. 
s.start_resend_event.set() - support.join_thread(s) + threading_helper.join_thread(s) self.assertEqual(c.contents, []) # the server might have been able to send a byte or two back, but this @@ -246,7 +247,7 @@ def test_push(self): self.assertRaises(TypeError, c.push, 'unicode') c.push(SERVER_QUIT) asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01) - support.join_thread(s) + threading_helper.join_thread(s) self.assertEqual(c.contents, [b'bytes', b'bytes', b'bytes']) diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py index 0fb361884185e..e7324d2e4811b 100644 --- a/Lib/test/test_asyncio/test_events.py +++ b/Lib/test/test_asyncio/test_events.py @@ -33,6 +33,7 @@ from test.test_asyncio import utils as test_utils from test import support from test.support import socket_helper +from test.support import threading_helper from test.support import ALWAYS_EQ, LARGEST, SMALLEST @@ -706,7 +707,7 @@ def client(): proto.transport.close() lsock.close() - support.join_thread(thread) + threading_helper.join_thread(thread) self.assertFalse(thread.is_alive()) self.assertEqual(proto.state, 'CLOSED') self.assertEqual(proto.nbytes, len(message)) diff --git a/Lib/test/test_asyncio/utils.py b/Lib/test/test_asyncio/utils.py index 804db9166fe7f..34da7390e1b16 100644 --- a/Lib/test/test_asyncio/utils.py +++ b/Lib/test/test_asyncio/utils.py @@ -34,6 +34,7 @@ from asyncio import tasks from asyncio.log import logger from test import support +from test.support import threading_helper def data_file(filename): @@ -546,7 +547,7 @@ def unpatch_get_running_loop(self): def setUp(self): self._get_running_loop = events._get_running_loop events._get_running_loop = lambda: None - self._thread_cleanup = support.threading_setup() + self._thread_cleanup = threading_helper.threading_setup() def tearDown(self): self.unpatch_get_running_loop() @@ -558,7 +559,7 @@ def tearDown(self): self.assertEqual(sys.exc_info(), (None, None, None)) self.doCleanups() - 
support.threading_cleanup(*self._thread_cleanup) + threading_helper.threading_cleanup(*self._thread_cleanup) support.reap_children() diff --git a/Lib/test/test_asyncore.py b/Lib/test/test_asyncore.py index 3c3abe4191788..2cee6fb2e996a 100644 --- a/Lib/test/test_asyncore.py +++ b/Lib/test/test_asyncore.py @@ -11,6 +11,7 @@ from test import support from test.support import socket_helper +from test.support import threading_helper from io import BytesIO if support.PGO: @@ -323,7 +324,7 @@ def setUp(self): def tearDown(self): asyncore.close_all() - @support.reap_threads + @threading_helper.reap_threads def test_send(self): evt = threading.Event() sock = socket.socket() @@ -360,7 +361,7 @@ def test_send(self): self.assertEqual(cap.getvalue(), data*2) finally: - support.join_thread(t) + threading_helper.join_thread(t) @unittest.skipUnless(hasattr(asyncore, 'file_wrapper'), @@ -766,7 +767,7 @@ def test_set_reuse_addr(self): self.assertTrue(s.socket.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR)) - @support.reap_threads + @threading_helper.reap_threads def test_quick_connect(self): # see: http://bugs.python.org/issue10340 if self.family not in (socket.AF_INET, getattr(socket, "AF_INET6", object())): @@ -788,7 +789,7 @@ def test_quick_connect(self): except OSError: pass finally: - support.join_thread(t) + threading_helper.join_thread(t) class TestAPI_UseIPv4Sockets(BaseTestAPI): family = socket.AF_INET diff --git a/Lib/test/test_bz2.py b/Lib/test/test_bz2.py index 78b95d88faafa..91ccff2d0c07f 100644 --- a/Lib/test/test_bz2.py +++ b/Lib/test/test_bz2.py @@ -12,6 +12,7 @@ import shutil import subprocess import threading +from test.support import threading_helper from test.support import unlink import _compression import sys @@ -502,7 +503,7 @@ def comp(): for i in range(5): f.write(data) threads = [threading.Thread(target=comp) for i in range(nthreads)] - with support.start_threads(threads): + with threading_helper.start_threads(threads): pass def 
testMixedIterationAndReads(self): diff --git a/Lib/test/test_capi.py b/Lib/test/test_capi.py index 5c7526aa7ec29..44693b8fdd717 100644 --- a/Lib/test/test_capi.py +++ b/Lib/test/test_capi.py @@ -17,6 +17,7 @@ import importlib.util from test import support from test.support import MISSING_C_DOCSTRINGS +from test.support import threading_helper from test.support.script_helper import assert_python_failure, assert_python_ok try: import _posixsubprocess @@ -575,7 +576,7 @@ class foo(object):pass threads = [threading.Thread(target=self.pendingcalls_thread, args=(context,)) for i in range(context.nThreads)] - with support.start_threads(threads): + with threading_helper.start_threads(threads): self.pendingcalls_wait(context.l, n, context) def pendingcalls_thread(self, context): @@ -634,7 +635,7 @@ def test_mutate_exception(self): class TestThreadState(unittest.TestCase): - @support.reap_threads + @threading_helper.reap_threads def test_thread_state(self): # some extra thread-state tests driven via _testcapi def target(): diff --git a/Lib/test/test_concurrent_futures.py b/Lib/test/test_concurrent_futures.py index 40597ffee7378..3b74949a5f61d 100644 --- a/Lib/test/test_concurrent_futures.py +++ b/Lib/test/test_concurrent_futures.py @@ -1,4 +1,5 @@ from test import support +from test.support import threading_helper # Skip tests if _multiprocessing wasn't built. 
support.import_module('_multiprocessing') @@ -100,11 +101,11 @@ def make_dummy_object(_): class BaseTestCase(unittest.TestCase): def setUp(self): - self._thread_key = support.threading_setup() + self._thread_key = threading_helper.threading_setup() def tearDown(self): support.reap_children() - support.threading_cleanup(*self._thread_key) + threading_helper.threading_cleanup(*self._thread_key) class ExecutorMixin: @@ -1496,11 +1497,11 @@ def test_multiple_set_exception(self): def setUpModule(): global _threads_key - _threads_key = support.threading_setup() + _threads_key = threading_helper.threading_setup() def tearDownModule(): - support.threading_cleanup(*_threads_key) + threading_helper.threading_cleanup(*_threads_key) multiprocessing.util._cleanup_tests() diff --git a/Lib/test/test_email/test_email.py b/Lib/test/test_email/test_email.py index 59eabb0092194..1d28e26dec681 100644 --- a/Lib/test/test_email/test_email.py +++ b/Lib/test/test_email/test_email.py @@ -37,7 +37,8 @@ from email import base64mime from email import quoprimime -from test.support import unlink, start_threads +from test.support import threading_helper +from test.support import unlink from test.test_email import openfile, TestEmailBase # These imports are documented to work, but we are testing them using a @@ -3241,7 +3242,7 @@ def run(self): append(make_msgid(domain='testdomain-string')) threads = [MsgidsThread() for i in range(5)] - with start_threads(threads): + with threading_helper.start_threads(threads): pass all_ids = sum([t.msgids for t in threads], []) self.assertEqual(len(set(all_ids)), len(all_ids)) diff --git a/Lib/test/test_enum.py b/Lib/test/test_enum.py index 1df0313da0a7e..e7bad62406773 100644 --- a/Lib/test/test_enum.py +++ b/Lib/test/test_enum.py @@ -10,6 +10,7 @@ from pickle import dumps, loads, PicklingError, HIGHEST_PROTOCOL from test import support from test.support import ALWAYS_EQ +from test.support import threading_helper from datetime import timedelta @@ -2333,7 
+2334,7 @@ class Color(StrMixin, AllMixin, Flag): self.assertEqual(Color.ALL.value, 7) self.assertEqual(str(Color.BLUE), 'blue') - @support.reap_threads + @threading_helper.reap_threads def test_unique_composite(self): # override __eq__ to be identity only class TestFlag(Flag): @@ -2363,7 +2364,7 @@ def cycle_enum(): threading.Thread(target=cycle_enum) for _ in range(8) ] - with support.start_threads(threads): + with threading_helper.start_threads(threads): pass # check that only 248 members were created self.assertFalse( @@ -2751,7 +2752,7 @@ class Color(StrMixin, AllMixin, IntFlag): self.assertEqual(Color.ALL.value, 7) self.assertEqual(str(Color.BLUE), 'blue') - @support.reap_threads + @threading_helper.reap_threads def test_unique_composite(self): # override __eq__ to be identity only class TestFlag(IntFlag): @@ -2781,7 +2782,7 @@ def cycle_enum(): threading.Thread(target=cycle_enum) for _ in range(8) ] - with support.start_threads(threads): + with threading_helper.start_threads(threads): pass # check that only 248 members were created self.assertFalse( diff --git a/Lib/test/test_ftplib.py b/Lib/test/test_ftplib.py index e424076d7d317..cb43573318b6a 100644 --- a/Lib/test/test_ftplib.py +++ b/Lib/test/test_ftplib.py @@ -19,6 +19,7 @@ from unittest import TestCase, skipUnless from test import support +from test.support import threading_helper from test.support import socket_helper from test.support.socket_helper import HOST, HOSTv6 @@ -1117,11 +1118,11 @@ def test_main(): TestTLS_FTPClassMixin, TestTLS_FTPClass, MiscTestCase] - thread_info = support.threading_setup() + thread_info = threading_helper.threading_setup() try: support.run_unittest(*tests) finally: - support.threading_cleanup(*thread_info) + threading_helper.threading_cleanup(*thread_info) if __name__ == '__main__': diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py index e122fe0b33340..72b7765853bc0 100644 --- a/Lib/test/test_functools.py +++ b/Lib/test/test_functools.py @@ -19,6 
+19,7 @@ from weakref import proxy import contextlib +from test.support import threading_helper from test.support.script_helper import assert_python_ok import functools @@ -1798,7 +1799,7 @@ def clear(): # create n threads in order to fill cache threads = [threading.Thread(target=full, args=[k]) for k in range(n)] - with support.start_threads(threads): + with threading_helper.start_threads(threads): start.set() hits, misses, maxsize, currsize = f.cache_info() @@ -1816,7 +1817,7 @@ def clear(): threads += [threading.Thread(target=full, args=[k]) for k in range(n)] start.clear() - with support.start_threads(threads): + with threading_helper.start_threads(threads): start.set() finally: sys.setswitchinterval(orig_si) @@ -1838,7 +1839,7 @@ def test(): self.assertEqual(f(i), 3 * i) stop.wait(10) threads = [threading.Thread(target=test) for k in range(n)] - with support.start_threads(threads): + with threading_helper.start_threads(threads): for i in range(m): start.wait(10) stop.reset() @@ -1858,7 +1859,7 @@ def test(i, x): self.assertEqual(f(x), 3 * x, i) threads = [threading.Thread(target=test, args=(i, v)) for i, v in enumerate([1, 2, 2, 3, 2])] - with support.start_threads(threads): + with threading_helper.start_threads(threads): pass def test_need_for_rlock(self): @@ -2792,7 +2793,7 @@ def test_threaded(self): threading.Thread(target=lambda: item.cost) for k in range(num_threads) ] - with support.start_threads(threads): + with threading_helper.start_threads(threads): go.set() finally: sys.setswitchinterval(orig_si) diff --git a/Lib/test/test_gc.py b/Lib/test/test_gc.py index acb6391944bc0..c82970827c672 100644 --- a/Lib/test/test_gc.py +++ b/Lib/test/test_gc.py @@ -1,10 +1,10 @@ import unittest import unittest.mock from test.support import (verbose, refcount_test, run_unittest, - cpython_only, start_threads, - temp_dir, TESTFN, unlink, + cpython_only, temp_dir, TESTFN, unlink, import_module) from test.support.script_helper import assert_python_ok, make_script +from 
test.support import threading_helper import gc import sys @@ -415,7 +415,7 @@ def run_thread(): for i in range(N_THREADS): t = threading.Thread(target=run_thread) threads.append(t) - with start_threads(threads, lambda: exit.append(1)): + with threading_helper.start_threads(threads, lambda: exit.append(1)): time.sleep(1.0) finally: sys.setswitchinterval(old_switchinterval) diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py index 6088307f8410b..2f79244748e68 100644 --- a/Lib/test/test_hashlib.py +++ b/Lib/test/test_hashlib.py @@ -19,6 +19,7 @@ import warnings from test import support from test.support import _4G, bigmemtest, import_fresh_module +from test.support import threading_helper from http.client import HTTPException # Were we compiled --with-pydebug or with #define Py_DEBUG? @@ -870,7 +871,7 @@ def test_gil(self): '1cfceca95989f51f658e3f3ffe7f1cd43726c9e088c13ee10b46f57cef135b94' ) - @support.reap_threads + @threading_helper.reap_threads def test_threaded_hashing(self): # Updating the same hash object from several threads at once # using data chunk sizes containing the same byte sequences. 
diff --git a/Lib/test/test_httpservers.py b/Lib/test/test_httpservers.py index c442f5571a868..71a0511e53a72 100644 --- a/Lib/test/test_httpservers.py +++ b/Lib/test/test_httpservers.py @@ -30,6 +30,7 @@ import unittest from test import support +from test.support import threading_helper class NoLogRequestHandler: @@ -64,7 +65,7 @@ def stop(self): class BaseTestCase(unittest.TestCase): def setUp(self): - self._threads = support.threading_setup() + self._threads = threading_helper.threading_setup() os.environ = support.EnvironmentVarGuard() self.server_started = threading.Event() self.thread = TestServerThread(self, self.request_handler) @@ -75,7 +76,7 @@ def tearDown(self): self.thread.stop() self.thread = None os.environ.__exit__() - support.threading_cleanup(*self._threads) + threading_helper.threading_cleanup(*self._threads) def request(self, uri, method='GET', body=None, headers={}): self.connection = http.client.HTTPConnection(self.HOST, self.PORT) diff --git a/Lib/test/test_imaplib.py b/Lib/test/test_imaplib.py index d1e3550868059..0fcc1fb99a289 100644 --- a/Lib/test/test_imaplib.py +++ b/Lib/test/test_imaplib.py @@ -10,9 +10,10 @@ import threading import socket -from test.support import (reap_threads, verbose, +from test.support import (verbose, run_with_tz, run_with_locale, cpython_only) from test.support import hashlib_helper +from test.support import threading_helper import unittest from unittest import mock from datetime import datetime, timezone, timedelta @@ -252,7 +253,7 @@ def _cleanup(self): # cleanup the server self.server.shutdown() self.server.server_close() - support.join_thread(self.thread) + threading_helper.join_thread(self.thread) # Explicitly clear the attribute to prevent dangling thread self.thread = None @@ -641,13 +642,13 @@ def reaped_pair(self, hdlr): finally: client.logout() - @reap_threads + @threading_helper.reap_threads def test_connect(self): with self.reaped_server(SimpleIMAPHandler) as server: client = 
self.imap_class(*server.server_address) client.shutdown() - @reap_threads + @threading_helper.reap_threads def test_bracket_flags(self): # This violates RFC 3501, which disallows ']' characters in tag names, @@ -696,7 +697,7 @@ def cmd_STORE(self, tag, args): typ, [data] = client.response('PERMANENTFLAGS') self.assertIn(b'[test]', data) - @reap_threads + @threading_helper.reap_threads def test_issue5949(self): class EOFHandler(socketserver.StreamRequestHandler): @@ -708,7 +709,7 @@ def handle(self): self.assertRaises(imaplib.IMAP4.abort, self.imap_class, *server.server_address) - @reap_threads + @threading_helper.reap_threads def test_line_termination(self): class BadNewlineHandler(SimpleIMAPHandler): @@ -732,7 +733,7 @@ def cmd_AUTHENTICATE(self, tag, args): self.server.response = yield self._send_tagged(tag, 'OK', 'FAKEAUTH successful') - @reap_threads + @threading_helper.reap_threads def test_enable_raises_error_if_not_AUTH(self): with self.reaped_pair(self.UTF8Server) as (server, client): self.assertFalse(client.utf8_enabled) @@ -741,14 +742,14 @@ def test_enable_raises_error_if_not_AUTH(self): # XXX Also need a test that enable after SELECT raises an error. 
- @reap_threads + @threading_helper.reap_threads def test_enable_raises_error_if_no_capability(self): class NoEnableServer(self.UTF8Server): capabilities = 'AUTH' with self.reaped_pair(NoEnableServer) as (server, client): self.assertRaises(imaplib.IMAP4.error, client.enable, 'foo') - @reap_threads + @threading_helper.reap_threads def test_enable_UTF8_raises_error_if_not_supported(self): class NonUTF8Server(SimpleIMAPHandler): pass @@ -759,7 +760,7 @@ class NonUTF8Server(SimpleIMAPHandler): client.enable('UTF8=ACCEPT') pass - @reap_threads + @threading_helper.reap_threads def test_enable_UTF8_True_append(self): class UTF8AppendServer(self.UTF8Server): @@ -789,7 +790,7 @@ def cmd_APPEND(self, tag, args): # XXX also need a test that makes sure that the Literal and Untagged_status # regexes uses unicode in UTF8 mode instead of the default ASCII. - @reap_threads + @threading_helper.reap_threads def test_search_disallows_charset_in_utf8_mode(self): with self.reaped_pair(self.UTF8Server) as (server, client): typ, _ = client.authenticate('MYAUTH', lambda x: b'fake') @@ -799,7 +800,7 @@ def test_search_disallows_charset_in_utf8_mode(self): self.assertTrue(client.utf8_enabled) self.assertRaises(imaplib.IMAP4.error, client.search, 'foo', 'bar') - @reap_threads + @threading_helper.reap_threads def test_bad_auth_name(self): class MyServer(SimpleIMAPHandler): @@ -812,7 +813,7 @@ def cmd_AUTHENTICATE(self, tag, args): with self.assertRaises(imaplib.IMAP4.error): client.authenticate('METHOD', lambda: 1) - @reap_threads + @threading_helper.reap_threads def test_invalid_authentication(self): class MyServer(SimpleIMAPHandler): @@ -826,7 +827,7 @@ def cmd_AUTHENTICATE(self, tag, args): with self.assertRaises(imaplib.IMAP4.error): code, data = client.authenticate('MYAUTH', lambda x: b'fake') - @reap_threads + @threading_helper.reap_threads def test_valid_authentication(self): class MyServer(SimpleIMAPHandler): @@ -848,7 +849,7 @@ def cmd_AUTHENTICATE(self, tag, args): 
self.assertEqual(server.response, b'ZmFrZQ==\r\n') # b64 encoded 'fake' - @reap_threads + @threading_helper.reap_threads @hashlib_helper.requires_hashdigest('md5') def test_login_cram_md5(self): @@ -877,7 +878,7 @@ def cmd_AUTHENTICATE(self, tag, args): self.assertEqual(ret, "OK") - @reap_threads + @threading_helper.reap_threads def test_aborted_authentication(self): class MyServer(SimpleIMAPHandler): @@ -906,14 +907,14 @@ def handle(self): self.assertRaises(imaplib.IMAP4.error, self.imap_class, *server.server_address) - @reap_threads + @threading_helper.reap_threads def test_simple_with_statement(self): # simplest call with self.reaped_server(SimpleIMAPHandler) as server: with self.imap_class(*server.server_address): pass - @reap_threads + @threading_helper.reap_threads def test_with_statement(self): with self.reaped_server(SimpleIMAPHandler) as server: with self.imap_class(*server.server_address) as imap: @@ -921,7 +922,7 @@ def test_with_statement(self): self.assertEqual(server.logged, 'user') self.assertIsNone(server.logged) - @reap_threads + @threading_helper.reap_threads def test_with_statement_logout(self): # what happens if already logout in the block? 
with self.reaped_server(SimpleIMAPHandler) as server: @@ -938,7 +939,7 @@ class ThreadedNetworkedTestsSSL(ThreadedNetworkedTests): server_class = SecureTCPServer imap_class = IMAP4_SSL - @reap_threads + @threading_helper.reap_threads def test_ssl_verified(self): ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) ssl_context.load_verify_locations(CAFILE) diff --git a/Lib/test/test_import/__init__.py b/Lib/test/test_import/__init__.py index d50befc030a48..060d145970ee9 100644 --- a/Lib/test/test_import/__init__.py +++ b/Lib/test/test_import/__init__.py @@ -25,6 +25,7 @@ unlink, unload, cpython_only, TESTFN_UNENCODABLE, temp_dir, DirsOnSysPath) from test.support import script_helper +from test.support import threading_helper from test.test_importlib.util import uncache from types import ModuleType @@ -459,7 +460,7 @@ def run(): event = threading.Event() threads = [threading.Thread(target=run) for x in range(2)] try: - with test.support.start_threads(threads, event.set): + with threading_helper.start_threads(threads, event.set): time.sleep(0) finally: sys.modules.pop('package', None) diff --git a/Lib/test/test_importlib/test_locks.py b/Lib/test/test_importlib/test_locks.py index 21794d911ef69..0e94ce91801d6 100644 --- a/Lib/test/test_importlib/test_locks.py +++ b/Lib/test/test_importlib/test_locks.py @@ -7,6 +7,7 @@ import weakref from test import support +from test.support import threading_helper from test import lock_tests @@ -138,7 +139,7 @@ def test_all_locks(self): ) = test_util.test_both(LifetimeTests, init=init) - at support.reap_threads + at threading_helper.reap_threads def test_main(): support.run_unittest(Frozen_ModuleLockAsRLockTests, Source_ModuleLockAsRLockTests, diff --git a/Lib/test/test_importlib/test_threaded_import.py b/Lib/test/test_importlib/test_threaded_import.py index d1f64c70fac80..06da18ed396d9 100644 --- a/Lib/test/test_importlib/test_threaded_import.py +++ b/Lib/test/test_importlib/test_threaded_import.py @@ -15,8 +15,9 @@ import unittest 
from unittest import mock from test.support import ( - verbose, run_unittest, TESTFN, reap_threads, - forget, unlink, rmtree, start_threads) + verbose, run_unittest, TESTFN, + forget, unlink, rmtree) +from test.support import threading_helper def task(N, done, done_tasks, errors): try: @@ -124,9 +125,9 @@ def check_parallel_module_init(self, mock_os): done_tasks = [] done.clear() t0 = time.monotonic() - with start_threads(threading.Thread(target=task, - args=(N, done, done_tasks, errors,)) - for i in range(N)): + with threading_helper.start_threads( + threading.Thread(target=task, args=(N, done, done_tasks, errors,)) + for i in range(N)): pass completed = done.wait(10 * 60) dt = time.monotonic() - t0 @@ -245,7 +246,7 @@ def target(): del sys.modules[TESTFN] - at reap_threads + at threading_helper.reap_threads def test_main(): old_switchinterval = None try: diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py index fe07b56880bbf..7b8511b66bf10 100644 --- a/Lib/test/test_io.py +++ b/Lib/test/test_io.py @@ -40,6 +40,7 @@ from test import support from test.support.script_helper import ( assert_python_ok, assert_python_failure, run_python_until_end) +from test.support import threading_helper from test.support import FakePath import codecs @@ -1472,7 +1473,7 @@ def f(): errors.append(e) raise threads = [threading.Thread(target=f) for x in range(20)] - with support.start_threads(threads): + with threading_helper.start_threads(threads): time.sleep(0.02) # yield self.assertFalse(errors, "the following exceptions were caught: %r" % errors) @@ -1836,7 +1837,7 @@ def f(): errors.append(e) raise threads = [threading.Thread(target=f) for x in range(20)] - with support.start_threads(threads): + with threading_helper.start_threads(threads): time.sleep(0.02) # yield self.assertFalse(errors, "the following exceptions were caught: %r" % errors) @@ -3270,7 +3271,7 @@ def run(n): f.write(text) threads = [threading.Thread(target=run, args=(x,)) for x in range(20)] - with 
support.start_threads(threads, event.set): + with threading_helper.start_threads(threads, event.set): time.sleep(0.02) with self.open(support.TESTFN) as f: content = f.read() diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py index 9a114451913e8..275ce2e45f169 100644 --- a/Lib/test/test_logging.py +++ b/Lib/test/test_logging.py @@ -43,6 +43,7 @@ from test.support.script_helper import assert_python_ok, assert_python_failure from test import support from test.support import socket_helper +from test.support import threading_helper from test.support.logging_helper import TestHandler import textwrap import threading @@ -79,7 +80,7 @@ class BaseTest(unittest.TestCase): def setUp(self): """Setup the default logging stream to an internal StringIO instance, so that we can examine log output as we want.""" - self._threading_key = support.threading_setup() + self._threading_key = threading_helper.threading_setup() logger_dict = logging.getLogger().manager.loggerDict logging._acquireLock() @@ -150,7 +151,7 @@ def tearDown(self): logging._releaseLock() self.doCleanups() - support.threading_cleanup(*self._threading_key) + threading_helper.threading_cleanup(*self._threading_key) def assert_log_lines(self, expected_values, stream=None, pat=None): """Match the collected log lines against the regular expression @@ -865,7 +866,7 @@ def stop(self): Wait for the server thread to terminate. 
""" self.close() - support.join_thread(self._thread) + threading_helper.join_thread(self._thread) self._thread = None asyncore.close_all(map=self._map, ignore_all=True) @@ -915,7 +916,7 @@ def stop(self): """ self.shutdown() if self._thread is not None: - support.join_thread(self._thread) + threading_helper.join_thread(self._thread) self._thread = None self.server_close() self.ready.clear() @@ -3212,7 +3213,7 @@ def setup_via_listener(self, text, verify=None): finally: t.ready.wait(2.0) logging.config.stopListening() - support.join_thread(t) + threading_helper.join_thread(t) def test_listen_config_10_ok(self): with support.captured_stdout() as output: diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py index 0db7d30f6385e..7d4376aed89bd 100644 --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -31,6 +31,7 @@ import warnings from test import support from test.support import socket_helper +from test.support import threading_helper from platform import win32_is_iot try: @@ -3163,12 +3164,12 @@ class TestSendfile(unittest.TestCase): @classmethod def setUpClass(cls): - cls.key = support.threading_setup() + cls.key = threading_helper.threading_setup() create_file(support.TESTFN, cls.DATA) @classmethod def tearDownClass(cls): - support.threading_cleanup(*cls.key) + threading_helper.threading_cleanup(*cls.key) support.unlink(support.TESTFN) def setUp(self): diff --git a/Lib/test/test_poll.py b/Lib/test/test_poll.py index ef966bf0f5608..a14c69a5723a2 100644 --- a/Lib/test/test_poll.py +++ b/Lib/test/test_poll.py @@ -7,7 +7,8 @@ import threading import time import unittest -from test.support import TESTFN, run_unittest, reap_threads, cpython_only +from test.support import TESTFN, run_unittest, cpython_only +from test.support import threading_helper try: select.poll @@ -175,7 +176,7 @@ def test_poll_c_limits(self): self.assertRaises(OverflowError, pollster.poll, INT_MAX + 1) self.assertRaises(OverflowError, pollster.poll, UINT_MAX + 1) - @reap_threads + 
@threading_helper.reap_threads def test_threaded_poll(self): r, w = os.pipe() self.addCleanup(os.close, r) @@ -204,7 +205,7 @@ def test_threaded_poll(self): t.join() @unittest.skipUnless(threading, 'Threading required for this test.') - @reap_threads + @threading_helper.reap_threads def test_poll_blocks_with_negative_ms(self): for timeout_ms in [None, -1000, -1, -1.0, -0.1, -1e-100]: # Create two file descriptors. This will be used to unlock diff --git a/Lib/test/test_poplib.py b/Lib/test/test_poplib.py index b670afcf4e62e..2ac345ddd68a9 100644 --- a/Lib/test/test_poplib.py +++ b/Lib/test/test_poplib.py @@ -15,6 +15,7 @@ from test import support as test_support from test.support import hashlib_helper from test.support import socket_helper +from test.support import threading_helper HOST = socket_helper.HOST PORT = 0 @@ -536,11 +537,11 @@ def testTimeoutValue(self): def test_main(): tests = [TestPOP3Class, TestTimeouts, TestPOP3_SSLClass, TestPOP3_TLSClass] - thread_info = test_support.threading_setup() + thread_info = threading_helper.threading_setup() try: test_support.run_unittest(*tests) finally: - test_support.threading_cleanup(*thread_info) + threading_helper.threading_cleanup(*thread_info) if __name__ == '__main__': diff --git a/Lib/test/test_pydoc.py b/Lib/test/test_pydoc.py index ffabb7f1b9407..f0d7ffd562c9d 100644 --- a/Lib/test/test_pydoc.py +++ b/Lib/test/test_pydoc.py @@ -24,9 +24,10 @@ from io import StringIO from collections import namedtuple from test.support.script_helper import assert_python_ok +from test.support import threading_helper from test.support import ( TESTFN, rmtree, - reap_children, reap_threads, captured_output, captured_stdout, + reap_children, captured_output, captured_stdout, captured_stderr, unlink, requires_docstrings ) from test import pydoc_mod @@ -1575,7 +1576,7 @@ def test_sys_path_adjustment_when_curdir_already_included(self): self.assertIsNone(self._get_revised_path(trailing_argv0dir)) - at reap_threads + at 
threading_helper.reap_threads def test_main(): try: test.support.run_unittest(PydocDocTest, diff --git a/Lib/test/test_queue.py b/Lib/test/test_queue.py index d88e28a9146ef..7b23699a00f1d 100644 --- a/Lib/test/test_queue.py +++ b/Lib/test/test_queue.py @@ -7,6 +7,7 @@ import unittest import weakref from test import support +from test.support import threading_helper py_queue = support.import_fresh_module('queue', blocked=['_queue']) c_queue = support.import_fresh_module('queue', fresh=['_queue']) @@ -63,7 +64,7 @@ def do_blocking_test(self, block_func, block_args, trigger_func, trigger_args): block_func) return self.result finally: - support.join_thread(thread) # make sure the thread terminates + threading_helper.join_thread(thread) # make sure the thread terminates # Call this instead if block_func is supposed to raise an exception. def do_exceptional_blocking_test(self,block_func, block_args, trigger_func, @@ -79,7 +80,7 @@ def do_exceptional_blocking_test(self,block_func, block_args, trigger_func, self.fail("expected exception of kind %r" % expected_exception_class) finally: - support.join_thread(thread) # make sure the thread terminates + threading_helper.join_thread(thread) # make sure the thread terminates if not thread.startedEvent.is_set(): self.fail("trigger thread ended but event never set") @@ -484,7 +485,7 @@ def wrapper(*args, **kwargs): args=(q, results, sentinel)) for i in range(n_consumers)] - with support.start_threads(feeders + consumers): + with threading_helper.start_threads(feeders + consumers): pass self.assertFalse(exceptions) diff --git a/Lib/test/test_robotparser.py b/Lib/test/test_robotparser.py index a3112b8fdf473..b0bed431d4b05 100644 --- a/Lib/test/test_robotparser.py +++ b/Lib/test/test_robotparser.py @@ -5,6 +5,7 @@ import urllib.robotparser from test import support from test.support import socket_helper +from test.support import threading_helper from http.server import BaseHTTPRequestHandler, HTTPServer @@ -330,7 +331,7 @@ def 
tearDown(self): self.t.join() self.server.server_close() - @support.reap_threads + @threading_helper.reap_threads def testPasswordProtectedSite(self): addr = self.server.server_address url = 'http://' + socket_helper.HOST + ':' + str(addr[1]) diff --git a/Lib/test/test_sched.py b/Lib/test/test_sched.py index 26cb4be81e5e4..491d7b3a745b4 100644 --- a/Lib/test/test_sched.py +++ b/Lib/test/test_sched.py @@ -4,6 +4,7 @@ import time import unittest from test import support +from test.support import threading_helper TIMEOUT = support.SHORT_TIMEOUT @@ -82,7 +83,7 @@ def test_enter_concurrent(self): self.assertEqual(q.get(timeout=TIMEOUT), 5) self.assertTrue(q.empty()) timer.advance(1000) - support.join_thread(t) + threading_helper.join_thread(t) self.assertTrue(q.empty()) self.assertEqual(timer.time(), 5) @@ -137,7 +138,7 @@ def test_cancel_concurrent(self): self.assertEqual(q.get(timeout=TIMEOUT), 4) self.assertTrue(q.empty()) timer.advance(1000) - support.join_thread(t) + threading_helper.join_thread(t) self.assertTrue(q.empty()) self.assertEqual(timer.time(), 4) diff --git a/Lib/test/test_smtplib.py b/Lib/test/test_smtplib.py index 576299900318d..7816ed34886e9 100644 --- a/Lib/test/test_smtplib.py +++ b/Lib/test/test_smtplib.py @@ -22,7 +22,7 @@ from test import support, mock_socket from test.support import hashlib_helper from test.support import socket_helper -from test.support import threading_setup, threading_cleanup, join_thread +from test.support import threading_helper from unittest.mock import Mock HOST = socket_helper.HOST @@ -217,7 +217,7 @@ class DebuggingServerTests(unittest.TestCase): maxDiff = None def setUp(self): - self.thread_key = threading_setup() + self.thread_key = threading_helper.threading_setup() self.real_getfqdn = socket.getfqdn socket.getfqdn = mock_socket.getfqdn # temporarily replace sys.stdout to capture DebuggingServer output @@ -249,7 +249,7 @@ def tearDown(self): self.client_evt.set() # wait for the server thread to terminate 
self.serv_evt.wait() - join_thread(self.thread) + threading_helper.join_thread(self.thread) # restore sys.stdout sys.stdout = self.old_stdout # restore DEBUGSTREAM @@ -257,7 +257,7 @@ def tearDown(self): smtpd.DEBUGSTREAM = self.old_DEBUGSTREAM del self.thread self.doCleanups() - threading_cleanup(*self.thread_key) + threading_helper.threading_cleanup(*self.thread_key) def get_output_without_xpeer(self): test_output = self.output.getvalue() @@ -704,7 +704,7 @@ class TooLongLineTests(unittest.TestCase): respdata = b'250 OK' + (b'.' * smtplib._MAXLINE * 2) + b'\n' def setUp(self): - self.thread_key = threading_setup() + self.thread_key = threading_helper.threading_setup() self.old_stdout = sys.stdout self.output = io.StringIO() sys.stdout = self.output @@ -722,10 +722,10 @@ def setUp(self): def tearDown(self): self.evt.wait() sys.stdout = self.old_stdout - join_thread(self.thread) + threading_helper.join_thread(self.thread) del self.thread self.doCleanups() - threading_cleanup(*self.thread_key) + threading_helper.threading_cleanup(*self.thread_key) def testLineTooLong(self): self.assertRaises(smtplib.SMTPResponseException, smtplib.SMTP, @@ -955,7 +955,7 @@ def handle_error(self): class SMTPSimTests(unittest.TestCase): def setUp(self): - self.thread_key = threading_setup() + self.thread_key = threading_helper.threading_setup() self.real_getfqdn = socket.getfqdn socket.getfqdn = mock_socket.getfqdn self.serv_evt = threading.Event() @@ -978,10 +978,10 @@ def tearDown(self): self.client_evt.set() # wait for the server thread to terminate self.serv_evt.wait() - join_thread(self.thread) + threading_helper.join_thread(self.thread) del self.thread self.doCleanups() - threading_cleanup(*self.thread_key) + threading_helper.threading_cleanup(*self.thread_key) def testBasic(self): # smoke test @@ -1268,7 +1268,7 @@ class SMTPUTF8SimTests(unittest.TestCase): maxDiff = None def setUp(self): - self.thread_key = threading_setup() + self.thread_key = 
threading_helper.threading_setup() self.real_getfqdn = socket.getfqdn socket.getfqdn = mock_socket.getfqdn self.serv_evt = threading.Event() @@ -1293,10 +1293,10 @@ def tearDown(self): self.client_evt.set() # wait for the server thread to terminate self.serv_evt.wait() - join_thread(self.thread) + threading_helper.join_thread(self.thread) del self.thread self.doCleanups() - threading_cleanup(*self.thread_key) + threading_helper.threading_cleanup(*self.thread_key) def test_test_server_supports_extensions(self): smtp = smtplib.SMTP( @@ -1397,7 +1397,7 @@ class SimSMTPAUTHInitialResponseServer(SimSMTPServer): class SMTPAUTHInitialResponseSimTests(unittest.TestCase): def setUp(self): - self.thread_key = threading_setup() + self.thread_key = threading_helper.threading_setup() self.real_getfqdn = socket.getfqdn socket.getfqdn = mock_socket.getfqdn self.serv_evt = threading.Event() @@ -1421,10 +1421,10 @@ def tearDown(self): self.client_evt.set() # wait for the server thread to terminate self.serv_evt.wait() - join_thread(self.thread) + threading_helper.join_thread(self.thread) del self.thread self.doCleanups() - threading_cleanup(*self.thread_key) + threading_helper.threading_cleanup(*self.thread_key) def testAUTH_PLAIN_initial_response_login(self): self.serv.add_feature('AUTH PLAIN') diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py index dc1330735df10..cff07b46c7a2a 100755 --- a/Lib/test/test_socket.py +++ b/Lib/test/test_socket.py @@ -1,6 +1,7 @@ import unittest from test import support from test.support import socket_helper +from test.support import threading_helper import errno import io @@ -336,7 +337,7 @@ def serverExplicitReady(self): self.server_ready.set() def _setUp(self): - self.wait_threads = support.wait_threads_exit() + self.wait_threads = threading_helper.wait_threads_exit() self.wait_threads.__enter__() self.server_ready = threading.Event() @@ -6665,9 +6666,9 @@ def test_main(): ]) tests.append(TestMSWindowsTCPFlags) - thread_info = 
support.threading_setup() + thread_info = threading_helper.threading_setup() support.run_unittest(*tests) - support.threading_cleanup(*thread_info) + threading_helper.threading_cleanup(*thread_info) if __name__ == "__main__": diff --git a/Lib/test/test_socketserver.py b/Lib/test/test_socketserver.py index c663cc95889c9..5db8cec567afb 100644 --- a/Lib/test/test_socketserver.py +++ b/Lib/test/test_socketserver.py @@ -14,8 +14,9 @@ import socketserver import test.support -from test.support import reap_children, reap_threads, verbose +from test.support import reap_children, verbose from test.support import socket_helper +from test.support import threading_helper test.support.requires("network") @@ -120,7 +121,7 @@ def handle(self): self.assertEqual(server.server_address, server.socket.getsockname()) return server - @reap_threads + @threading_helper.reap_threads def run_server(self, svrcls, hdlrbase, testfunc): server = self.make_server(self.pickaddr(svrcls.address_family), svrcls, hdlrbase) @@ -249,7 +250,7 @@ def test_ForkingUnixDatagramServer(self): socketserver.DatagramRequestHandler, self.dgram_examine) - @reap_threads + @threading_helper.reap_threads def test_shutdown(self): # Issue #2302: shutdown() should always succeed in making an # other thread leave serve_forever(). 
diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py index 5d496c6687614..ecb6049a6750f 100644 --- a/Lib/test/test_ssl.py +++ b/Lib/test/test_ssl.py @@ -5,6 +5,7 @@ import unittest.mock from test import support from test.support import socket_helper +from test.support import threading_helper import socket import select import time @@ -4429,7 +4430,7 @@ def test_pha_required_nocert(self): # Ignore expected SSLError in ConnectionHandler of ThreadedEchoServer # (it is only raised sometimes on Windows) - with support.catch_threading_exception() as cm: + with threading_helper.catch_threading_exception() as cm: server = ThreadedEchoServer(context=server_context, chatty=False) with server: with client_context.wrap_socket(socket.socket(), @@ -4750,11 +4751,11 @@ def test_main(verbose=False): if support.is_resource_enabled('network'): tests.append(NetworkedTests) - thread_info = support.threading_setup() + thread_info = threading_helper.threading_setup() try: support.run_unittest(*tests) finally: - support.threading_cleanup(*thread_info) + threading_helper.threading_cleanup(*thread_info) if __name__ == "__main__": test_main() diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py index 33b34593a0af9..2f93eaae560db 100644 --- a/Lib/test/test_sys.py +++ b/Lib/test/test_sys.py @@ -1,5 +1,3 @@ -from test import support -from test.support.script_helper import assert_python_ok, assert_python_failure import builtins import codecs import gc @@ -11,6 +9,9 @@ import sys import sysconfig import test.support +from test import support +from test.support.script_helper import assert_python_ok, assert_python_failure +from test.support import threading_helper import textwrap import unittest import warnings @@ -365,7 +366,7 @@ def test_getframe(self): ) # sys._current_frames() is a CPython-only gimmick. 
- @test.support.reap_threads + @threading_helper.reap_threads def test_current_frames(self): import threading import traceback diff --git a/Lib/test/test_thread.py b/Lib/test/test_thread.py index 77e46f2c2f15a..62b57fa338837 100644 --- a/Lib/test/test_thread.py +++ b/Lib/test/test_thread.py @@ -2,6 +2,7 @@ import unittest import random from test import support +from test.support import threading_helper import _thread as thread import time import weakref @@ -32,8 +33,8 @@ def setUp(self): self.running = 0 self.next_ident = 0 - key = support.threading_setup() - self.addCleanup(support.threading_cleanup, *key) + key = threading_helper.threading_setup() + self.addCleanup(threading_helper.threading_cleanup, *key) class ThreadRunningTests(BasicThreadTest): @@ -58,7 +59,7 @@ def task(self, ident): self.done_mutex.release() def test_starting_threads(self): - with support.wait_threads_exit(): + with threading_helper.wait_threads_exit(): # Basic test for thread creation. for i in range(NUMTASKS): self.newtask() @@ -94,7 +95,7 @@ def test_nt_and_posix_stack_size(self): verbose_print("trying stack_size = (%d)" % tss) self.next_ident = 0 self.created = 0 - with support.wait_threads_exit(): + with threading_helper.wait_threads_exit(): for i in range(NUMTASKS): self.newtask() @@ -116,7 +117,7 @@ def task(): mut.acquire() mut.release() - with support.wait_threads_exit(): + with threading_helper.wait_threads_exit(): thread.start_new_thread(task, ()) while not started: time.sleep(POLL_SLEEP) @@ -140,7 +141,7 @@ def task(): started = thread.allocate_lock() with support.catch_unraisable_exception() as cm: - with support.wait_threads_exit(): + with threading_helper.wait_threads_exit(): started.acquire() thread.start_new_thread(task, ()) started.acquire() @@ -180,7 +181,7 @@ def enter(self): class BarrierTest(BasicThreadTest): def test_barrier(self): - with support.wait_threads_exit(): + with threading_helper.wait_threads_exit(): self.bar = Barrier(NUMTASKS) self.running = NUMTASKS for 
i in range(NUMTASKS): @@ -223,7 +224,7 @@ def setUp(self): self.read_fd, self.write_fd = os.pipe() @unittest.skipUnless(hasattr(os, 'fork'), 'need os.fork') - @support.reap_threads + @threading_helper.reap_threads def test_forkinthread(self): pid = None @@ -243,7 +244,7 @@ def fork_thread(read_fd, write_fd): finally: os._exit(0) - with support.wait_threads_exit(): + with threading_helper.wait_threads_exit(): thread.start_new_thread(fork_thread, (self.read_fd, self.write_fd)) self.assertEqual(os.read(self.read_fd, 2), b"OK") os.close(self.write_fd) diff --git a/Lib/test/test_threadedtempfile.py b/Lib/test/test_threadedtempfile.py index e1d7a10179cc1..fe63c9e91437b 100644 --- a/Lib/test/test_threadedtempfile.py +++ b/Lib/test/test_threadedtempfile.py @@ -15,7 +15,7 @@ import tempfile -from test.support import start_threads +from test.support import threading_helper import unittest import io import threading @@ -50,7 +50,7 @@ def run(self): class ThreadedTempFileTest(unittest.TestCase): def test_main(self): threads = [TempFileGreedy() for i in range(NUM_THREADS)] - with start_threads(threads, startEvent.set): + with threading_helper.start_threads(threads, startEvent.set): pass ok = sum(t.ok_count for t in threads) errors = [str(t.name) + str(t.errors.getvalue()) diff --git a/Lib/test/test_threading.py b/Lib/test/test_threading.py index 81e5f70d6d6ae..ad82e304e32f3 100644 --- a/Lib/test/test_threading.py +++ b/Lib/test/test_threading.py @@ -3,6 +3,7 @@ """ import test.support +from test.support import threading_helper from test.support import verbose, import_module, cpython_only from test.support.script_helper import assert_python_ok, assert_python_failure @@ -75,10 +76,10 @@ def run(self): class BaseTestCase(unittest.TestCase): def setUp(self): - self._threads = test.support.threading_setup() + self._threads = threading_helper.threading_setup() def tearDown(self): - test.support.threading_cleanup(*self._threads) + threading_helper.threading_cleanup(*self._threads) 
test.support.reap_children() @@ -130,7 +131,7 @@ def f(): done.set() done = threading.Event() ident = [] - with support.wait_threads_exit(): + with threading_helper.wait_threads_exit(): tid = _thread.start_new_thread(f, ()) done.wait() self.assertEqual(ident[0], tid) @@ -171,7 +172,7 @@ def f(mutex): mutex = threading.Lock() mutex.acquire() - with support.wait_threads_exit(): + with threading_helper.wait_threads_exit(): tid = _thread.start_new_thread(f, (mutex,)) # Wait for the thread to finish. mutex.acquire() diff --git a/Lib/test/test_threading_local.py b/Lib/test/test_threading_local.py index 2fd14ae2e16f3..9862094eaccd8 100644 --- a/Lib/test/test_threading_local.py +++ b/Lib/test/test_threading_local.py @@ -2,6 +2,7 @@ import unittest from doctest import DocTestSuite from test import support +from test.support import threading_helper import weakref import gc @@ -65,8 +66,8 @@ def f(i): # Simply check that the variable is correctly set self.assertEqual(local.x, i) - with support.start_threads(threading.Thread(target=f, args=(i,)) - for i in range(10)): + with threading_helper.start_threads(threading.Thread(target=f, args=(i,)) + for i in range(10)): pass def test_derived_cycle_dealloc(self): diff --git a/Lib/test/test_threadsignals.py b/Lib/test/test_threadsignals.py index eeacd3698cb13..15e8078e93662 100644 --- a/Lib/test/test_threadsignals.py +++ b/Lib/test/test_threadsignals.py @@ -5,6 +5,7 @@ import os import sys from test import support +from test.support import threading_helper import _thread as thread import time @@ -39,7 +40,7 @@ def send_signals(): class ThreadSignals(unittest.TestCase): def test_signals(self): - with support.wait_threads_exit(): + with threading_helper.wait_threads_exit(): # Test signal handling semantics of threads. # We spawn a thread, have the thread send two signals, and # wait for it to finish. 
Check that we got both signals @@ -129,7 +130,7 @@ def test_rlock_acquire_interruption(self): def other_thread(): rlock.acquire() - with support.wait_threads_exit(): + with threading_helper.wait_threads_exit(): thread.start_new_thread(other_thread, ()) # Wait until we can't acquire it without blocking... while rlock.acquire(blocking=False): @@ -165,7 +166,7 @@ def other_thread(): time.sleep(0.5) lock.release() - with support.wait_threads_exit(): + with threading_helper.wait_threads_exit(): thread.start_new_thread(other_thread, ()) # Wait until we can't acquire it without blocking... while lock.acquire(blocking=False): @@ -212,7 +213,7 @@ def send_signals(): os.kill(process_pid, signal.SIGUSR1) done.release() - with support.wait_threads_exit(): + with threading_helper.wait_threads_exit(): # Send the signals from the non-main thread, since the main thread # is the only one that can process signals. thread.start_new_thread(send_signals, ()) diff --git a/Lib/test/test_urllib2_localnet.py b/Lib/test/test_urllib2_localnet.py index ed426b05a7198..e568cc4575549 100644 --- a/Lib/test/test_urllib2_localnet.py +++ b/Lib/test/test_urllib2_localnet.py @@ -10,6 +10,7 @@ from test import support from test.support import hashlib_helper +from test.support import threading_helper try: import ssl @@ -666,11 +667,11 @@ def setUpModule(): # Store the threading_setup in a key and ensure that it is cleaned up # in the tearDown global threads_key - threads_key = support.threading_setup() + threads_key = threading_helper.threading_setup() def tearDownModule(): if threads_key: - support.threading_cleanup(*threads_key) + threading_helper.threading_cleanup(*threads_key) if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_xmlrpc.py b/Lib/test/test_xmlrpc.py index f68af527eae85..79f702d0a75d3 100644 --- a/Lib/test/test_xmlrpc.py +++ b/Lib/test/test_xmlrpc.py @@ -16,6 +16,7 @@ import contextlib from test import support from test.support import socket_helper +from test.support 
import threading_helper from test.support import ALWAYS_EQ, LARGEST, SMALLEST try: @@ -1464,7 +1465,7 @@ def test_xmlrpcserver_has_use_builtin_types_flag(self): self.assertTrue(server.use_builtin_types) - at support.reap_threads + at threading_helper.reap_threads def test_main(): support.run_unittest(XMLRPCTestCase, HelperTestCase, DateTimeTestCase, BinaryTestCase, FaultTestCase, UseBuiltinTypesTestCase, From webhook-mailer at python.org Wed May 27 18:38:19 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 27 May 2020 22:38:19 -0000 Subject: [Python-checkins] bpo-40795: ctypes calls unraisablehook with an exception (GH-20452) Message-ID: https://github.com/python/cpython/commit/10228bad0452d94e66c964b625a0b61befa08e59 commit: 10228bad0452d94e66c964b625a0b61befa08e59 branch: master author: Victor Stinner committer: GitHub date: 2020-05-28T00:38:12+02:00 summary: bpo-40795: ctypes calls unraisablehook with an exception (GH-20452) If ctypes fails to convert the result of a callback or if a ctypes callback function raises an exception, sys.unraisablehook is now called with an exception set. Previously, the error was logged into stderr by PyErr_Print(). 
files: A Misc/NEWS.d/next/Library/2020-05-27-17-00-18.bpo-40795.eZSnHA.rst M Lib/ctypes/test/test_callbacks.py M Lib/ctypes/test/test_random_things.py M Lib/ctypes/test/test_unaligned_structures.py M Modules/_ctypes/callbacks.c M Modules/_ctypes/callproc.c diff --git a/Lib/ctypes/test/test_callbacks.py b/Lib/ctypes/test/test_callbacks.py index 937a06d981b00..d8e9c5a760e2c 100644 --- a/Lib/ctypes/test/test_callbacks.py +++ b/Lib/ctypes/test/test_callbacks.py @@ -1,5 +1,7 @@ import functools import unittest +from test import support + from ctypes import * from ctypes.test import need_symbol import _ctypes_test @@ -301,8 +303,22 @@ def func(*args): with self.assertRaises(ArgumentError): cb(*args2) + def test_convert_result_error(self): + def func(): + return ("tuple",) + + proto = CFUNCTYPE(c_int) + ctypes_func = proto(func) + with support.catch_unraisable_exception() as cm: + # don't test the result since it is an uninitialized value + result = ctypes_func() + + self.assertIsInstance(cm.unraisable.exc_value, TypeError) + self.assertEqual(cm.unraisable.err_msg, + "Exception ignored on converting result " + "of ctypes callback function") + self.assertIs(cm.unraisable.object, func) -################################################################ if __name__ == '__main__': unittest.main() diff --git a/Lib/ctypes/test/test_random_things.py b/Lib/ctypes/test/test_random_things.py index ee5b2128ea0fa..2988e275cf4bb 100644 --- a/Lib/ctypes/test/test_random_things.py +++ b/Lib/ctypes/test/test_random_things.py @@ -1,5 +1,9 @@ from ctypes import * -import unittest, sys +import contextlib +from test import support +import unittest +import sys + def callback_func(arg): 42 / arg @@ -34,41 +38,40 @@ class CallbackTracbackTestCase(unittest.TestCase): # created, then a full traceback printed. When SystemExit is # raised in a callback function, the interpreter exits. 
- def capture_stderr(self, func, *args, **kw): - # helper - call function 'func', and return the captured stderr - import io - old_stderr = sys.stderr - logger = sys.stderr = io.StringIO() - try: - func(*args, **kw) - finally: - sys.stderr = old_stderr - return logger.getvalue() + @contextlib.contextmanager + def expect_unraisable(self, exc_type, exc_msg=None): + with support.catch_unraisable_exception() as cm: + yield + + self.assertIsInstance(cm.unraisable.exc_value, exc_type) + if exc_msg is not None: + self.assertEqual(str(cm.unraisable.exc_value), exc_msg) + self.assertEqual(cm.unraisable.err_msg, + "Exception ignored on calling ctypes " + "callback function") + self.assertIs(cm.unraisable.object, callback_func) def test_ValueError(self): cb = CFUNCTYPE(c_int, c_int)(callback_func) - out = self.capture_stderr(cb, 42) - self.assertEqual(out.splitlines()[-1], - "ValueError: 42") + with self.expect_unraisable(ValueError, '42'): + cb(42) def test_IntegerDivisionError(self): cb = CFUNCTYPE(c_int, c_int)(callback_func) - out = self.capture_stderr(cb, 0) - self.assertEqual(out.splitlines()[-1][:19], - "ZeroDivisionError: ") + with self.expect_unraisable(ZeroDivisionError): + cb(0) def test_FloatDivisionError(self): cb = CFUNCTYPE(c_int, c_double)(callback_func) - out = self.capture_stderr(cb, 0.0) - self.assertEqual(out.splitlines()[-1][:19], - "ZeroDivisionError: ") + with self.expect_unraisable(ZeroDivisionError): + cb(0.0) def test_TypeErrorDivisionError(self): cb = CFUNCTYPE(c_int, c_char_p)(callback_func) - out = self.capture_stderr(cb, b"spam") - self.assertEqual(out.splitlines()[-1], - "TypeError: " - "unsupported operand type(s) for /: 'int' and 'bytes'") + err_msg = "unsupported operand type(s) for /: 'int' and 'bytes'" + with self.expect_unraisable(TypeError, err_msg): + cb(b"spam") + if __name__ == '__main__': unittest.main() diff --git a/Lib/ctypes/test/test_unaligned_structures.py b/Lib/ctypes/test/test_unaligned_structures.py index 
bcacfc8184b43..ee7fb45809bf7 100644 --- a/Lib/ctypes/test/test_unaligned_structures.py +++ b/Lib/ctypes/test/test_unaligned_structures.py @@ -27,7 +27,6 @@ class Y(SwappedStructure): class TestStructures(unittest.TestCase): def test_native(self): for typ in structures: -## print typ.value self.assertEqual(typ.value.offset, 1) o = typ() o.value = 4 @@ -35,7 +34,6 @@ def test_native(self): def test_swapped(self): for typ in byteswapped_structures: -## print >> sys.stderr, typ.value self.assertEqual(typ.value.offset, 1) o = typ() o.value = 4 diff --git a/Misc/NEWS.d/next/Library/2020-05-27-17-00-18.bpo-40795.eZSnHA.rst b/Misc/NEWS.d/next/Library/2020-05-27-17-00-18.bpo-40795.eZSnHA.rst new file mode 100644 index 0000000000000..dd02fb05cab5e --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-27-17-00-18.bpo-40795.eZSnHA.rst @@ -0,0 +1,4 @@ +:mod:`ctypes` module: If ctypes fails to convert the result of a callback or +if a ctypes callback function raises an exception, sys.unraisablehook is now +called with an exception set. Previously, the error was logged into stderr +by :c:func:`PyErr_Print`. 
diff --git a/Modules/_ctypes/callbacks.c b/Modules/_ctypes/callbacks.c index 2b903c98e8e8d..29e8fac8c9496 100644 --- a/Modules/_ctypes/callbacks.c +++ b/Modules/_ctypes/callbacks.c @@ -213,9 +213,6 @@ static void _CallPythonObject(void *mem, pArgs++; } -#define CHECK(what, x) \ -if (x == NULL) _PyTraceback_Add(what, "_ctypes/callbacks.c", __LINE__ - 1), PyErr_Print() - if (flags & (FUNCFLAG_USE_ERRNO | FUNCFLAG_USE_LASTERROR)) { error_object = _ctypes_get_errobj(&space); if (error_object == NULL) @@ -235,7 +232,10 @@ if (x == NULL) _PyTraceback_Add(what, "_ctypes/callbacks.c", __LINE__ - 1), PyEr } result = PyObject_CallObject(callable, arglist); - CHECK("'calling callback function'", result); + if (result == NULL) { + _PyErr_WriteUnraisableMsg("on calling ctypes callback function", + callable); + } #ifdef MS_WIN32 if (flags & FUNCFLAG_USE_LASTERROR) { @@ -251,16 +251,17 @@ if (x == NULL) _PyTraceback_Add(what, "_ctypes/callbacks.c", __LINE__ - 1), PyEr } Py_XDECREF(error_object); - if ((restype != &ffi_type_void) && result) { - PyObject *keep; + if (restype != &ffi_type_void && result) { assert(setfunc); + #ifdef WORDS_BIGENDIAN - /* See the corresponding code in callproc.c, around line 961 */ - if (restype->type != FFI_TYPE_FLOAT && restype->size < sizeof(ffi_arg)) + /* See the corresponding code in _ctypes_callproc(): + in callproc.c, around line 1219. */ + if (restype->type != FFI_TYPE_FLOAT && restype->size < sizeof(ffi_arg)) { mem = (char *)mem + sizeof(ffi_arg) - restype->size; + } #endif - keep = setfunc(mem, result, 0); - CHECK("'converting callback result'", keep); + /* keep is an object we have to keep alive so that the result stays valid. If there is no such object, the setfunc will have returned Py_None. @@ -270,18 +271,32 @@ if (x == NULL) _PyTraceback_Add(what, "_ctypes/callbacks.c", __LINE__ - 1), PyEr be the result. EXCEPT when restype is py_object - Python itself knows how to manage the refcount of these objects. 
*/ - if (keep == NULL) /* Could not convert callback result. */ - PyErr_WriteUnraisable(callable); - else if (keep == Py_None) /* Nothing to keep */ + PyObject *keep = setfunc(mem, result, 0); + + if (keep == NULL) { + /* Could not convert callback result. */ + _PyErr_WriteUnraisableMsg("on converting result " + "of ctypes callback function", + callable); + } + else if (keep == Py_None) { + /* Nothing to keep */ Py_DECREF(keep); + } else if (setfunc != _ctypes_get_fielddesc("O")->setfunc) { if (-1 == PyErr_WarnEx(PyExc_RuntimeWarning, "memory leak in callback function.", 1)) - PyErr_WriteUnraisable(callable); + { + _PyErr_WriteUnraisableMsg("on converting result " + "of ctypes callback function", + callable); + } } } + Py_XDECREF(result); + Done: Py_XDECREF(arglist); PyGILState_Release(state); diff --git a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c index 9bc28c260717d..af6e1e8ce0b75 100644 --- a/Modules/_ctypes/callproc.c +++ b/Modules/_ctypes/callproc.c @@ -1231,7 +1231,9 @@ PyObject *_ctypes_callproc(PPROC pProc, if (rtype->type != FFI_TYPE_FLOAT && rtype->type != FFI_TYPE_STRUCT && rtype->size < sizeof(ffi_arg)) + { resbuf = (char *)resbuf + sizeof(ffi_arg) - rtype->size; + } #endif #ifdef MS_WIN32 From webhook-mailer at python.org Wed May 27 18:44:30 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 27 May 2020 22:44:30 -0000 Subject: [Python-checkins] bpo-40275: test.support.check_impl_detail() uses sys.implementation (GH-20468) Message-ID: https://github.com/python/cpython/commit/b0461e19b5ecb2d89917b23efb5ce1048fab1b22 commit: b0461e19b5ecb2d89917b23efb5ce1048fab1b22 branch: master author: Victor Stinner committer: GitHub date: 2020-05-28T00:44:23+02:00 summary: bpo-40275: test.support.check_impl_detail() uses sys.implementation (GH-20468) check_impl_detail() of test.support now uses sys.implementation.name, instead of platform.python_implementation().lower(). 
This change prepares test.support to import the platform module lazily. files: M Lib/test/support/__init__.py diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index e894545f87e42..c958bae643a71 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -1740,7 +1740,7 @@ def check_impl_detail(**guards): if check_impl_detail(cpython=False): # everywhere except on CPython """ guards, default = _parse_guards(guards) - return guards.get(platform.python_implementation().lower(), default) + return guards.get(sys.implementation.name, default) def no_tracing(func): From webhook-mailer at python.org Wed May 27 18:56:44 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Wed, 27 May 2020 22:56:44 -0000 Subject: [Python-checkins] bpo-40795: ctypes calls unraisablehook with an exception (GH-20452) Message-ID: https://github.com/python/cpython/commit/45ce0dbc4f8c68fe22ae97860faa8f2ec7faf27b commit: 45ce0dbc4f8c68fe22ae97860faa8f2ec7faf27b branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-27T15:56:36-07:00 summary: bpo-40795: ctypes calls unraisablehook with an exception (GH-20452) If ctypes fails to convert the result of a callback or if a ctypes callback function raises an exception, sys.unraisablehook is now called with an exception set. Previously, the error was logged into stderr by PyErr_Print(). 
(cherry picked from commit 10228bad0452d94e66c964b625a0b61befa08e59) Co-authored-by: Victor Stinner files: A Misc/NEWS.d/next/Library/2020-05-27-17-00-18.bpo-40795.eZSnHA.rst M Lib/ctypes/test/test_callbacks.py M Lib/ctypes/test/test_random_things.py M Lib/ctypes/test/test_unaligned_structures.py M Modules/_ctypes/callbacks.c M Modules/_ctypes/callproc.c diff --git a/Lib/ctypes/test/test_callbacks.py b/Lib/ctypes/test/test_callbacks.py index 937a06d981b00..d8e9c5a760e2c 100644 --- a/Lib/ctypes/test/test_callbacks.py +++ b/Lib/ctypes/test/test_callbacks.py @@ -1,5 +1,7 @@ import functools import unittest +from test import support + from ctypes import * from ctypes.test import need_symbol import _ctypes_test @@ -301,8 +303,22 @@ def func(*args): with self.assertRaises(ArgumentError): cb(*args2) + def test_convert_result_error(self): + def func(): + return ("tuple",) + + proto = CFUNCTYPE(c_int) + ctypes_func = proto(func) + with support.catch_unraisable_exception() as cm: + # don't test the result since it is an uninitialized value + result = ctypes_func() + + self.assertIsInstance(cm.unraisable.exc_value, TypeError) + self.assertEqual(cm.unraisable.err_msg, + "Exception ignored on converting result " + "of ctypes callback function") + self.assertIs(cm.unraisable.object, func) -################################################################ if __name__ == '__main__': unittest.main() diff --git a/Lib/ctypes/test/test_random_things.py b/Lib/ctypes/test/test_random_things.py index ee5b2128ea0fa..2988e275cf4bb 100644 --- a/Lib/ctypes/test/test_random_things.py +++ b/Lib/ctypes/test/test_random_things.py @@ -1,5 +1,9 @@ from ctypes import * -import unittest, sys +import contextlib +from test import support +import unittest +import sys + def callback_func(arg): 42 / arg @@ -34,41 +38,40 @@ class CallbackTracbackTestCase(unittest.TestCase): # created, then a full traceback printed. When SystemExit is # raised in a callback function, the interpreter exits. 
- def capture_stderr(self, func, *args, **kw): - # helper - call function 'func', and return the captured stderr - import io - old_stderr = sys.stderr - logger = sys.stderr = io.StringIO() - try: - func(*args, **kw) - finally: - sys.stderr = old_stderr - return logger.getvalue() + @contextlib.contextmanager + def expect_unraisable(self, exc_type, exc_msg=None): + with support.catch_unraisable_exception() as cm: + yield + + self.assertIsInstance(cm.unraisable.exc_value, exc_type) + if exc_msg is not None: + self.assertEqual(str(cm.unraisable.exc_value), exc_msg) + self.assertEqual(cm.unraisable.err_msg, + "Exception ignored on calling ctypes " + "callback function") + self.assertIs(cm.unraisable.object, callback_func) def test_ValueError(self): cb = CFUNCTYPE(c_int, c_int)(callback_func) - out = self.capture_stderr(cb, 42) - self.assertEqual(out.splitlines()[-1], - "ValueError: 42") + with self.expect_unraisable(ValueError, '42'): + cb(42) def test_IntegerDivisionError(self): cb = CFUNCTYPE(c_int, c_int)(callback_func) - out = self.capture_stderr(cb, 0) - self.assertEqual(out.splitlines()[-1][:19], - "ZeroDivisionError: ") + with self.expect_unraisable(ZeroDivisionError): + cb(0) def test_FloatDivisionError(self): cb = CFUNCTYPE(c_int, c_double)(callback_func) - out = self.capture_stderr(cb, 0.0) - self.assertEqual(out.splitlines()[-1][:19], - "ZeroDivisionError: ") + with self.expect_unraisable(ZeroDivisionError): + cb(0.0) def test_TypeErrorDivisionError(self): cb = CFUNCTYPE(c_int, c_char_p)(callback_func) - out = self.capture_stderr(cb, b"spam") - self.assertEqual(out.splitlines()[-1], - "TypeError: " - "unsupported operand type(s) for /: 'int' and 'bytes'") + err_msg = "unsupported operand type(s) for /: 'int' and 'bytes'" + with self.expect_unraisable(TypeError, err_msg): + cb(b"spam") + if __name__ == '__main__': unittest.main() diff --git a/Lib/ctypes/test/test_unaligned_structures.py b/Lib/ctypes/test/test_unaligned_structures.py index 
bcacfc8184b43..ee7fb45809bf7 100644 --- a/Lib/ctypes/test/test_unaligned_structures.py +++ b/Lib/ctypes/test/test_unaligned_structures.py @@ -27,7 +27,6 @@ class Y(SwappedStructure): class TestStructures(unittest.TestCase): def test_native(self): for typ in structures: -## print typ.value self.assertEqual(typ.value.offset, 1) o = typ() o.value = 4 @@ -35,7 +34,6 @@ def test_native(self): def test_swapped(self): for typ in byteswapped_structures: -## print >> sys.stderr, typ.value self.assertEqual(typ.value.offset, 1) o = typ() o.value = 4 diff --git a/Misc/NEWS.d/next/Library/2020-05-27-17-00-18.bpo-40795.eZSnHA.rst b/Misc/NEWS.d/next/Library/2020-05-27-17-00-18.bpo-40795.eZSnHA.rst new file mode 100644 index 0000000000000..dd02fb05cab5e --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-27-17-00-18.bpo-40795.eZSnHA.rst @@ -0,0 +1,4 @@ +:mod:`ctypes` module: If ctypes fails to convert the result of a callback or +if a ctypes callback function raises an exception, sys.unraisablehook is now +called with an exception set. Previously, the error was logged into stderr +by :c:func:`PyErr_Print`. 
diff --git a/Modules/_ctypes/callbacks.c b/Modules/_ctypes/callbacks.c index d2d9a6587d799..2a364d6c80e15 100644 --- a/Modules/_ctypes/callbacks.c +++ b/Modules/_ctypes/callbacks.c @@ -213,9 +213,6 @@ static void _CallPythonObject(void *mem, pArgs++; } -#define CHECK(what, x) \ -if (x == NULL) _PyTraceback_Add(what, "_ctypes/callbacks.c", __LINE__ - 1), PyErr_Print() - if (flags & (FUNCFLAG_USE_ERRNO | FUNCFLAG_USE_LASTERROR)) { error_object = _ctypes_get_errobj(&space); if (error_object == NULL) @@ -235,7 +232,10 @@ if (x == NULL) _PyTraceback_Add(what, "_ctypes/callbacks.c", __LINE__ - 1), PyEr } result = PyObject_CallObject(callable, arglist); - CHECK("'calling callback function'", result); + if (result == NULL) { + _PyErr_WriteUnraisableMsg("on calling ctypes callback function", + callable); + } #ifdef MS_WIN32 if (flags & FUNCFLAG_USE_LASTERROR) { @@ -251,16 +251,17 @@ if (x == NULL) _PyTraceback_Add(what, "_ctypes/callbacks.c", __LINE__ - 1), PyEr } Py_XDECREF(error_object); - if ((restype != &ffi_type_void) && result) { - PyObject *keep; + if (restype != &ffi_type_void && result) { assert(setfunc); + #ifdef WORDS_BIGENDIAN - /* See the corresponding code in callproc.c, around line 961 */ - if (restype->type != FFI_TYPE_FLOAT && restype->size < sizeof(ffi_arg)) + /* See the corresponding code in _ctypes_callproc(): + in callproc.c, around line 1219. */ + if (restype->type != FFI_TYPE_FLOAT && restype->size < sizeof(ffi_arg)) { mem = (char *)mem + sizeof(ffi_arg) - restype->size; + } #endif - keep = setfunc(mem, result, 0); - CHECK("'converting callback result'", keep); + /* keep is an object we have to keep alive so that the result stays valid. If there is no such object, the setfunc will have returned Py_None. @@ -270,18 +271,32 @@ if (x == NULL) _PyTraceback_Add(what, "_ctypes/callbacks.c", __LINE__ - 1), PyEr be the result. EXCEPT when restype is py_object - Python itself knows how to manage the refcount of these objects. 
*/ - if (keep == NULL) /* Could not convert callback result. */ - PyErr_WriteUnraisable(callable); - else if (keep == Py_None) /* Nothing to keep */ + PyObject *keep = setfunc(mem, result, 0); + + if (keep == NULL) { + /* Could not convert callback result. */ + _PyErr_WriteUnraisableMsg("on converting result " + "of ctypes callback function", + callable); + } + else if (keep == Py_None) { + /* Nothing to keep */ Py_DECREF(keep); + } else if (setfunc != _ctypes_get_fielddesc("O")->setfunc) { if (-1 == PyErr_WarnEx(PyExc_RuntimeWarning, "memory leak in callback function.", 1)) - PyErr_WriteUnraisable(callable); + { + _PyErr_WriteUnraisableMsg("on converting result " + "of ctypes callback function", + callable); + } } } + Py_XDECREF(result); + Done: Py_XDECREF(arglist); PyGILState_Release(state); diff --git a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c index e0a110d3f4604..55fc226ca1258 100644 --- a/Modules/_ctypes/callproc.c +++ b/Modules/_ctypes/callproc.c @@ -1232,7 +1232,9 @@ PyObject *_ctypes_callproc(PPROC pProc, if (rtype->type != FFI_TYPE_FLOAT && rtype->type != FFI_TYPE_STRUCT && rtype->size < sizeof(ffi_arg)) + { resbuf = (char *)resbuf + sizeof(ffi_arg) - rtype->size; + } #endif #ifdef MS_WIN32 From webhook-mailer at python.org Wed May 27 19:17:59 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Wed, 27 May 2020 23:17:59 -0000 Subject: [Python-checkins] bpo-39073: validate Address parts to disallow CRLF (GH-19007) (#19224) Message-ID: https://github.com/python/cpython/commit/7df32f844efed33ca781a016017eab7050263b90 commit: 7df32f844efed33ca781a016017eab7050263b90 branch: 3.6 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-27T19:17:52-04:00 summary: bpo-39073: validate Address parts to disallow CRLF (GH-19007) (#19224) Disallow CR or LF in email.headerregistry.Address arguments to guard against header injection attacks. 
(cherry picked from commit 614f17211c5fc0e5b828be1d3320661d1038fe8f) Co-authored-by: Ashwin Ramaswami Co-authored-by: Ashwin Ramaswami files: A Misc/NEWS.d/next/Security/2020-03-15-01-28-36.bpo-39073.6Szd3i.rst M Lib/email/headerregistry.py M Lib/test/test_email/test_headerregistry.py diff --git a/Lib/email/headerregistry.py b/Lib/email/headerregistry.py index f5be87f4d2436..6e86b3b26c92f 100644 --- a/Lib/email/headerregistry.py +++ b/Lib/email/headerregistry.py @@ -31,6 +31,11 @@ def __init__(self, display_name='', username='', domain='', addr_spec=None): without any Content Transfer Encoding. """ + + inputs = ''.join(filter(None, (display_name, username, domain, addr_spec))) + if '\r' in inputs or '\n' in inputs: + raise ValueError("invalid arguments; address parts cannot contain CR or LF") + # This clause with its potential 'raise' may only happen when an # application program creates an Address object using an addr_spec # keyword. The email library code itself must always supply username diff --git a/Lib/test/test_email/test_headerregistry.py b/Lib/test/test_email/test_headerregistry.py index d1007099f666c..08634daa7f617 100644 --- a/Lib/test/test_email/test_headerregistry.py +++ b/Lib/test/test_email/test_headerregistry.py @@ -1435,6 +1435,25 @@ def test_il8n(self): # with self.assertRaises(ValueError): # Address('foo', 'w?k', 'example.com') + def test_crlf_in_constructor_args_raises(self): + cases = ( + dict(display_name='foo\r'), + dict(display_name='foo\n'), + dict(display_name='foo\r\n'), + dict(domain='example.com\r'), + dict(domain='example.com\n'), + dict(domain='example.com\r\n'), + dict(username='wok\r'), + dict(username='wok\n'), + dict(username='wok\r\n'), + dict(addr_spec='wok at example.com\r'), + dict(addr_spec='wok at example.com\n'), + dict(addr_spec='wok at example.com\r\n') + ) + for kwargs in cases: + with self.subTest(kwargs=kwargs), self.assertRaisesRegex(ValueError, "invalid arguments"): + Address(**kwargs) + def 
test_non_ascii_username_in_addr_spec_raises(self): with self.assertRaises(ValueError): Address('foo', addr_spec='w?k at example.com') diff --git a/Misc/NEWS.d/next/Security/2020-03-15-01-28-36.bpo-39073.6Szd3i.rst b/Misc/NEWS.d/next/Security/2020-03-15-01-28-36.bpo-39073.6Szd3i.rst new file mode 100644 index 0000000000000..6c9447b897bf6 --- /dev/null +++ b/Misc/NEWS.d/next/Security/2020-03-15-01-28-36.bpo-39073.6Szd3i.rst @@ -0,0 +1 @@ +Disallow CR or LF in email.headerregistry.Address arguments to guard against header injection attacks. From webhook-mailer at python.org Wed May 27 19:56:37 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 27 May 2020 23:56:37 -0000 Subject: [Python-checkins] bpo-40275: test.support imports subprocess lazily (GH-20471) Message-ID: https://github.com/python/cpython/commit/aa890630bc877c73ec806c9982d8b7b2e7019723 commit: aa890630bc877c73ec806c9982d8b7b2e7019723 branch: master author: Victor Stinner committer: GitHub date: 2020-05-28T01:56:29+02:00 summary: bpo-40275: test.support imports subprocess lazily (GH-20471) test.support module now imports the platform and subprocess modules lazily to reduce the number of modules imported by "import test.support". With this change, the threading module is no longer imported indirectly by "import test.support". Use sys.version rather than platform.machine() to detect the Windows ARM32 buildbot. files: M Lib/test/support/__init__.py diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index c958bae643a71..4b87a0c574c1b 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -12,11 +12,9 @@ import importlib import importlib.util import os -import platform import re import stat import struct -import subprocess import sys import sysconfig import time @@ -81,7 +79,7 @@ # The timeout should be long enough for connect(), recv() and send() methods # of socket.socket. 
LOOPBACK_TIMEOUT = 5.0 -if sys.platform == 'win32' and platform.machine() == 'ARM': +if sys.platform == 'win32' and ' 32 bit (ARM)' in sys.version: # bpo-37553: test_socket.SendfileUsingSendTest is taking longer than 2 # seconds on Windows ARM32 buildbot LOOPBACK_TIMEOUT = 10 @@ -481,6 +479,7 @@ def forget(modname): def _is_gui_available(): if hasattr(_is_gui_available, 'result'): return _is_gui_available.result + import platform reason = None if sys.platform.startswith('win') and platform.win32_is_iot(): reason = "gui is not available on Windows IoT Core" @@ -581,6 +580,7 @@ def _requires_unix_version(sysname, min_version): def decorator(func): @functools.wraps(func) def wrapper(*args, **kw): + import platform if platform.system() == sysname: version_txt = platform.release().split('-', 1)[0] try: @@ -627,6 +627,7 @@ def decorator(func): @functools.wraps(func) def wrapper(*args, **kw): if sys.platform == 'darwin': + import platform version_txt = platform.mac_ver()[0] try: version = tuple(map(int, version_txt.split('.'))) @@ -1607,6 +1608,7 @@ def start(self): sys.stderr.flush() return + import subprocess with f: watchdog_script = findfile("memory_watchdog.py") self.mem_watchdog = subprocess.Popen([sys.executable, watchdog_script], @@ -2088,11 +2090,13 @@ def swap_item(obj, item, new_val): def args_from_interpreter_flags(): """Return a list of command-line arguments reproducing the current settings in sys.flags and sys.warnoptions.""" + import subprocess return subprocess._args_from_interpreter_flags() def optim_args_from_interpreter_flags(): """Return a list of command-line arguments reproducing the current optimization settings in sys.flags.""" + import subprocess return subprocess._optim_args_from_interpreter_flags() @@ -2233,6 +2237,7 @@ def __exit__(self, exc_type, exc_value, exc_tb): print("failed to clean up {}: {}".format(link, ex)) def _call(self, python, args, env, returncode): + import subprocess cmd = [python, *args] p = subprocess.Popen(cmd, 
stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env) @@ -2261,6 +2266,7 @@ def can_xattr(): if not hasattr(os, "setxattr"): can = False else: + import platform tmp_dir = tempfile.mkdtemp() tmp_fp, tmp_name = tempfile.mkstemp(dir=tmp_dir) try: @@ -2445,6 +2451,7 @@ def __enter__(self): pass if sys.platform == 'darwin': + import subprocess # Check if the 'Crash Reporter' on OSX was configured # in 'Developer' mode and warn that it will get triggered # when it is. @@ -2591,6 +2598,7 @@ def setswitchinterval(interval): if is_android and interval < minimum_interval: global _is_android_emulator if _is_android_emulator is None: + import subprocess _is_android_emulator = (subprocess.check_output( ['getprop', 'ro.kernel.qemu']).strip() == b'1') if _is_android_emulator: From webhook-mailer at python.org Wed May 27 20:41:38 2020 From: webhook-mailer at python.org (Elazar Gershuni) Date: Thu, 28 May 2020 00:41:38 -0000 Subject: [Python-checkins] bpo-39939: Fix removeprefix issue number in the What's New in Python 3.9 (GH-20473) Message-ID: https://github.com/python/cpython/commit/56853d8ec6ed89bf5a9b81c3781a4df46ac391d3 commit: 56853d8ec6ed89bf5a9b81c3781a4df46ac391d3 branch: master author: Elazar Gershuni committer: GitHub date: 2020-05-28T02:41:24+02:00 summary: bpo-39939: Fix removeprefix issue number in the What's New in Python 3.9 (GH-20473) files: M Doc/whatsnew/3.9.rst diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index 8a04f72513357..35496d7b8f5ef 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -113,7 +113,7 @@ PEP 616: New removeprefix() and removesuffix() string methods to easily remove an unneeded prefix or a suffix from a string. Corresponding ``bytes``, ``bytearray``, and ``collections.UserString`` methods have also been added. See :pep:`616` for a full description. (Contributed by Dennis Sweeney in -:issue:`18939`.) +:issue:`39939`.) 
PEP 585: Builtin Generic Types ------------------------------ From webhook-mailer at python.org Wed May 27 21:34:09 2020 From: webhook-mailer at python.org (Adorilson Bezerra) Date: Thu, 28 May 2020 01:34:09 -0000 Subject: [Python-checkins] Improve IO tutorial's "Old string formatting" section (GH-16251) Message-ID: https://github.com/python/cpython/commit/eaca2aa117d663acf8160a0b4543ee2c7006fcc7 commit: eaca2aa117d663acf8160a0b4543ee2c7006fcc7 branch: master author: Adorilson Bezerra committer: GitHub date: 2020-05-27T21:34:01-04:00 summary: Improve IO tutorial's "Old string formatting" section (GH-16251) * Use a more universal explanation of string interpolation rather than specifically referencing sprintf(), which depends on the reader having a C background. Co-authored-by: Kyle Stanley files: M Doc/tutorial/inputoutput.rst diff --git a/Doc/tutorial/inputoutput.rst b/Doc/tutorial/inputoutput.rst index a404f4be19f1b..366a532e817af 100644 --- a/Doc/tutorial/inputoutput.rst +++ b/Doc/tutorial/inputoutput.rst @@ -172,7 +172,7 @@ Positional and keyword arguments can be arbitrarily combined:: If you have a really long format string that you don't want to split up, it would be nice if you could reference the variables to be formatted by name instead of by position. This can be done by simply passing the dict and using -square brackets ``'[]'`` to access the keys :: +square brackets ``'[]'`` to access the keys. :: >>> table = {'Sjoerd': 4127, 'Jack': 4098, 'Dcab': 8637678} >>> print('Jack: {0[Jack]:d}; Sjoerd: {0[Sjoerd]:d}; ' @@ -257,10 +257,10 @@ left with zeros. It understands about plus and minus signs:: Old string formatting --------------------- -The ``%`` operator can also be used for string formatting. It interprets the -left argument much like a :c:func:`sprintf`\ -style format string to be applied -to the right argument, and returns the string resulting from this formatting -operation. 
For example:: +The % operator (modulo) can also be used for string formatting. Given ``'string' +% values``, instances of ``%`` in ``string`` are replaced with zero or more +elements of ``values``. This operation is commonly known as string +interpolation. For example:: >>> import math >>> print('The value of pi is approximately %5.3f.' % math.pi) From webhook-mailer at python.org Wed May 27 21:39:23 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Thu, 28 May 2020 01:39:23 -0000 Subject: [Python-checkins] Improve IO tutorial's "Old string formatting" section (GH-16251) Message-ID: https://github.com/python/cpython/commit/f5bd99b84f89cc4cf50a3abd8be801b366fc9147 commit: f5bd99b84f89cc4cf50a3abd8be801b366fc9147 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-27T18:39:19-07:00 summary: Improve IO tutorial's "Old string formatting" section (GH-16251) * Use a more universal explanation of string interpolation rather than specifically referencing sprintf(), which depends on the reader having a C background. Co-authored-by: Kyle Stanley (cherry picked from commit eaca2aa117d663acf8160a0b4543ee2c7006fcc7) Co-authored-by: Adorilson Bezerra files: M Doc/tutorial/inputoutput.rst diff --git a/Doc/tutorial/inputoutput.rst b/Doc/tutorial/inputoutput.rst index 74f381d4a0da5..62a982c10e093 100644 --- a/Doc/tutorial/inputoutput.rst +++ b/Doc/tutorial/inputoutput.rst @@ -172,7 +172,7 @@ Positional and keyword arguments can be arbitrarily combined:: If you have a really long format string that you don't want to split up, it would be nice if you could reference the variables to be formatted by name instead of by position. This can be done by simply passing the dict and using -square brackets ``'[]'`` to access the keys :: +square brackets ``'[]'`` to access the keys. 
:: >>> table = {'Sjoerd': 4127, 'Jack': 4098, 'Dcab': 8637678} >>> print('Jack: {0[Jack]:d}; Sjoerd: {0[Sjoerd]:d}; ' @@ -257,10 +257,10 @@ left with zeros. It understands about plus and minus signs:: Old string formatting --------------------- -The ``%`` operator can also be used for string formatting. It interprets the -left argument much like a :c:func:`sprintf`\ -style format string to be applied -to the right argument, and returns the string resulting from this formatting -operation. For example:: +The % operator (modulo) can also be used for string formatting. Given ``'string' +% values``, instances of ``%`` in ``string`` are replaced with zero or more +elements of ``values``. This operation is commonly known as string +interpolation. For example:: >>> import math >>> print('The value of pi is approximately %5.3f.' % math.pi) From webhook-mailer at python.org Wed May 27 21:41:36 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Thu, 28 May 2020 01:41:36 -0000 Subject: [Python-checkins] Improve IO tutorial's "Old string formatting" section (GH-16251) Message-ID: https://github.com/python/cpython/commit/c0c5db54cf9243ee3d6bd4c0306c6ba39db186c3 commit: c0c5db54cf9243ee3d6bd4c0306c6ba39db186c3 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-27T18:41:31-07:00 summary: Improve IO tutorial's "Old string formatting" section (GH-16251) * Use a more universal explanation of string interpolation rather than specifically referencing sprintf(), which depends on the reader having a C background. 
Co-authored-by: Kyle Stanley (cherry picked from commit eaca2aa117d663acf8160a0b4543ee2c7006fcc7) Co-authored-by: Adorilson Bezerra files: M Doc/tutorial/inputoutput.rst diff --git a/Doc/tutorial/inputoutput.rst b/Doc/tutorial/inputoutput.rst index a404f4be19f1b..366a532e817af 100644 --- a/Doc/tutorial/inputoutput.rst +++ b/Doc/tutorial/inputoutput.rst @@ -172,7 +172,7 @@ Positional and keyword arguments can be arbitrarily combined:: If you have a really long format string that you don't want to split up, it would be nice if you could reference the variables to be formatted by name instead of by position. This can be done by simply passing the dict and using -square brackets ``'[]'`` to access the keys :: +square brackets ``'[]'`` to access the keys. :: >>> table = {'Sjoerd': 4127, 'Jack': 4098, 'Dcab': 8637678} >>> print('Jack: {0[Jack]:d}; Sjoerd: {0[Sjoerd]:d}; ' @@ -257,10 +257,10 @@ left with zeros. It understands about plus and minus signs:: Old string formatting --------------------- -The ``%`` operator can also be used for string formatting. It interprets the -left argument much like a :c:func:`sprintf`\ -style format string to be applied -to the right argument, and returns the string resulting from this formatting -operation. For example:: +The % operator (modulo) can also be used for string formatting. Given ``'string' +% values``, instances of ``%`` in ``string`` are replaced with zero or more +elements of ``values``. This operation is commonly known as string +interpolation. For example:: >>> import math >>> print('The value of pi is approximately %5.3f.' % math.pi) From webhook-mailer at python.org Thu May 28 03:34:44 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Thu, 28 May 2020 07:34:44 -0000 Subject: [Python-checkins] bpo-40792: Make the result of PyNumber_Index() always having exact type int. 
(GH-20443) Message-ID: https://github.com/python/cpython/commit/5f4b229df7812f1788287095eb6b138bb21876a4 commit: 5f4b229df7812f1788287095eb6b138bb21876a4 branch: master author: Serhiy Storchaka committer: GitHub date: 2020-05-28T10:33:45+03:00 summary: bpo-40792: Make the result of PyNumber_Index() always having exact type int. (GH-20443) Previously, the result could have been an instance of a subclass of int. Also revert bpo-26202 and make attributes start, stop and step of the range object having exact type int. Add private function _PyNumber_Index() which preserves the old behavior of PyNumber_Index() for performance to use it in the conversion functions like PyLong_AsLong(). files: A Misc/NEWS.d/next/C API/2020-05-27-11-02-15.bpo-40792.pBw2Bb.rst A Misc/NEWS.d/next/Core and Builtins/2020-05-27-22-37-58.bpo-40792.WEDqqU.rst A Misc/NEWS.d/next/Library/2020-05-27-22-19-42.bpo-40792.87Yx01.rst M Doc/c-api/number.rst M Doc/library/operator.rst M Doc/whatsnew/3.10.rst M Include/cpython/abstract.h M Lib/copy.py M Lib/test/clinic.test M Lib/test/test_copy.py M Lib/test/test_range.py M Modules/_io/_iomodule.c M Modules/_io/clinic/bufferedio.c.h M Modules/_io/clinic/bytesio.c.h M Modules/_io/clinic/iobase.c.h M Modules/_io/clinic/stringio.c.h M Modules/_io/clinic/textio.c.h M Modules/_struct.c M Modules/arraymodule.c M Modules/clinic/_bisectmodule.c.h M Modules/clinic/_bz2module.c.h M Modules/clinic/_collectionsmodule.c.h M Modules/clinic/_elementtree.c.h M Modules/clinic/_hashopenssl.c.h M Modules/clinic/_lzmamodule.c.h M Modules/clinic/_operator.c.h M Modules/clinic/_sre.c.h M Modules/clinic/_struct.c.h M Modules/clinic/arraymodule.c.h M Modules/clinic/audioop.c.h M Modules/clinic/itertoolsmodule.c.h M Modules/clinic/posixmodule.c.h M Modules/clinic/zlibmodule.c.h M Modules/mathmodule.c M Modules/posixmodule.c M Objects/abstract.c M Objects/bytesobject.c M Objects/clinic/bytearrayobject.c.h M Objects/clinic/bytesobject.c.h M Objects/clinic/listobject.c.h M 
Objects/clinic/longobject.c.h M Objects/clinic/unicodeobject.c.h M Objects/floatobject.c M Objects/longobject.c M Objects/memoryobject.c M Objects/stringlib/clinic/transmogrify.h.h M Objects/typeobject.c M Objects/unicodeobject.c M Python/clinic/_warnings.c.h M Python/getargs.c M Tools/clinic/clinic.py diff --git a/Doc/c-api/number.rst b/Doc/c-api/number.rst index 620204ca8e229..37979bb506bcf 100644 --- a/Doc/c-api/number.rst +++ b/Doc/c-api/number.rst @@ -256,6 +256,10 @@ Number Protocol Returns the *o* converted to a Python int on success or ``NULL`` with a :exc:`TypeError` exception raised on failure. + .. versionchanged:: 3.10 + The result always has exact type :class:`int`. Previously, the result + could have been an instance of a subclass of ``int``. + .. c:function:: PyObject* PyNumber_ToBase(PyObject *n, int base) diff --git a/Doc/library/operator.rst b/Doc/library/operator.rst index fa02bde84650e..36c53556c2685 100644 --- a/Doc/library/operator.rst +++ b/Doc/library/operator.rst @@ -112,6 +112,10 @@ The mathematical and bitwise operations are the most numerous: Return *a* converted to an integer. Equivalent to ``a.__index__()``. + .. versionchanged:: 3.10 + The result always has exact type :class:`int`. Previously, the result + could have been an instance of a subclass of ``int``. + .. function:: inv(obj) invert(obj) diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst index 9edef1ed87ba1..34a09fe4b505c 100644 --- a/Doc/whatsnew/3.10.rst +++ b/Doc/whatsnew/3.10.rst @@ -129,6 +129,10 @@ C API Changes New Features ------------ + The result of :c:func:`PyNumber_Index` now always has exact type :class:`int`. + Previously, the result could have been an instance of a subclass of ``int``. + (Contributed by Serhiy Storchaka in :issue:`40792`.) 
+ Porting to Python 3.10 ---------------------- diff --git a/Include/cpython/abstract.h b/Include/cpython/abstract.h index 7bc80833a746e..aa72f998b701c 100644 --- a/Include/cpython/abstract.h +++ b/Include/cpython/abstract.h @@ -379,6 +379,9 @@ PyAPI_FUNC(void) _Py_add_one_to_index_C(int nd, Py_ssize_t *index, /* Convert Python int to Py_ssize_t. Do nothing if the argument is None. */ PyAPI_FUNC(int) _Py_convert_optional_to_ssize_t(PyObject *, void *); +/* Same as PyNumber_Index but can return an instance of a subclass of int. */ +PyAPI_FUNC(PyObject *) _PyNumber_Index(PyObject *o); + #ifdef __cplusplus } #endif diff --git a/Lib/copy.py b/Lib/copy.py index 41873f2c046ca..dd41c54dffe1d 100644 --- a/Lib/copy.py +++ b/Lib/copy.py @@ -192,6 +192,7 @@ def _deepcopy_atomic(x, memo): d[str] = _deepcopy_atomic d[types.CodeType] = _deepcopy_atomic d[type] = _deepcopy_atomic +d[range] = _deepcopy_atomic d[types.BuiltinFunctionType] = _deepcopy_atomic d[types.FunctionType] = _deepcopy_atomic d[weakref.ref] = _deepcopy_atomic diff --git a/Lib/test/clinic.test b/Lib/test/clinic.test index 5e6f129f0926e..f2be61355cc97 100644 --- a/Lib/test/clinic.test +++ b/Lib/test/clinic.test @@ -1332,7 +1332,7 @@ test_Py_ssize_t_converter(PyObject *module, PyObject *const *args, Py_ssize_t na } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[0]); + PyObject *iobj = _PyNumber_Index(args[0]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -1347,7 +1347,7 @@ test_Py_ssize_t_converter(PyObject *module, PyObject *const *args, Py_ssize_t na } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[1]); + PyObject *iobj = _PyNumber_Index(args[1]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -1373,7 +1373,7 @@ exit: static PyObject * test_Py_ssize_t_converter_impl(PyObject *module, Py_ssize_t a, Py_ssize_t b, Py_ssize_t c) -/*[clinic end generated code: output=ea781bb7169b3436 input=3855f184bb3f299d]*/ +/*[clinic end generated 
code: output=3bf73f9fdfeab468 input=3855f184bb3f299d]*/ /*[clinic input] diff --git a/Lib/test/test_copy.py b/Lib/test/test_copy.py index 35f72fb216b64..ba3d233f63d1c 100644 --- a/Lib/test/test_copy.py +++ b/Lib/test/test_copy.py @@ -357,7 +357,7 @@ def f(): pass tests = [None, 42, 2**100, 3.14, True, False, 1j, "hello", "hello\u1234", f.__code__, - NewStyle, Classic, max, property()] + NewStyle, range(10), Classic, max, property()] for x in tests: self.assertIs(copy.deepcopy(x), x) @@ -579,17 +579,6 @@ class C: self.assertIsNot(y, x) self.assertIs(y.foo, y) - def test_deepcopy_range(self): - class I(int): - pass - x = range(I(10)) - y = copy.deepcopy(x) - self.assertIsNot(y, x) - self.assertEqual(y, x) - self.assertIsNot(y.stop, x.stop) - self.assertEqual(y.stop, x.stop) - self.assertIsInstance(y.stop, I) - # _reconstruct() def test_reconstruct_string(self): diff --git a/Lib/test/test_range.py b/Lib/test/test_range.py index 30fa129b50ecb..107c0e2e11c7c 100644 --- a/Lib/test/test_range.py +++ b/Lib/test/test_range.py @@ -648,11 +648,17 @@ def test_attributes(self): self.assert_attrs(range(0, 10, 3), 0, 10, 3) self.assert_attrs(range(10, 0, -1), 10, 0, -1) self.assert_attrs(range(10, 0, -3), 10, 0, -3) + self.assert_attrs(range(True), 0, 1, 1) + self.assert_attrs(range(False, True), 0, 1, 1) + self.assert_attrs(range(False, True, True), 0, 1, 1) def assert_attrs(self, rangeobj, start, stop, step): self.assertEqual(rangeobj.start, start) self.assertEqual(rangeobj.stop, stop) self.assertEqual(rangeobj.step, step) + self.assertIs(type(rangeobj.start), int) + self.assertIs(type(rangeobj.stop), int) + self.assertIs(type(rangeobj.step), int) with self.assertRaises(AttributeError): rangeobj.start = 0 diff --git a/Misc/NEWS.d/next/C API/2020-05-27-11-02-15.bpo-40792.pBw2Bb.rst b/Misc/NEWS.d/next/C API/2020-05-27-11-02-15.bpo-40792.pBw2Bb.rst new file mode 100644 index 0000000000000..4cfe09bc774af --- /dev/null +++ b/Misc/NEWS.d/next/C 
API/2020-05-27-11-02-15.bpo-40792.pBw2Bb.rst @@ -0,0 +1,2 @@ +The result of :c:func:`PyNumber_Index` now always has exact type :class:`int`. +Previously, the result could have been an instance of a subclass of ``int``. diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-27-22-37-58.bpo-40792.WEDqqU.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-27-22-37-58.bpo-40792.WEDqqU.rst new file mode 100644 index 0000000000000..5986a221f5b37 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-27-22-37-58.bpo-40792.WEDqqU.rst @@ -0,0 +1,3 @@ +Attributes ``start``, ``stop`` and ``step`` of the :class:`range` object now +always has exact type :class:`int`. Previously, they could have been an +instance of a subclass of ``int``. diff --git a/Misc/NEWS.d/next/Library/2020-05-27-22-19-42.bpo-40792.87Yx01.rst b/Misc/NEWS.d/next/Library/2020-05-27-22-19-42.bpo-40792.87Yx01.rst new file mode 100644 index 0000000000000..032a96c6a5cb6 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-27-22-19-42.bpo-40792.87Yx01.rst @@ -0,0 +1,2 @@ +The result of :func:`operator.index` now always has exact type :class:`int`. +Previously, the result could have been an instance of a subclass of ``int``. 
diff --git a/Modules/_io/_iomodule.c b/Modules/_io/_iomodule.c index d7cadacea1b5b..a55e5cad6a392 100644 --- a/Modules/_io/_iomodule.c +++ b/Modules/_io/_iomodule.c @@ -532,7 +532,7 @@ PyNumber_AsOff_t(PyObject *item, PyObject *err) { Py_off_t result; PyObject *runerr; - PyObject *value = PyNumber_Index(item); + PyObject *value = _PyNumber_Index(item); if (value == NULL) return -1; diff --git a/Modules/_io/clinic/bufferedio.c.h b/Modules/_io/clinic/bufferedio.c.h index 1961ed94c5535..19543fdf79d58 100644 --- a/Modules/_io/clinic/bufferedio.c.h +++ b/Modules/_io/clinic/bufferedio.c.h @@ -122,7 +122,7 @@ _io__Buffered_peek(buffered *self, PyObject *const *args, Py_ssize_t nargs) } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[0]); + PyObject *iobj = _PyNumber_Index(args[0]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -197,7 +197,7 @@ _io__Buffered_read1(buffered *self, PyObject *const *args, Py_ssize_t nargs) } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[0]); + PyObject *iobj = _PyNumber_Index(args[0]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -421,7 +421,7 @@ _io_BufferedReader___init__(PyObject *self, PyObject *args, PyObject *kwargs) } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(fastargs[1]); + PyObject *iobj = _PyNumber_Index(fastargs[1]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -475,7 +475,7 @@ _io_BufferedWriter___init__(PyObject *self, PyObject *args, PyObject *kwargs) } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(fastargs[1]); + PyObject *iobj = _PyNumber_Index(fastargs[1]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -567,7 +567,7 @@ _io_BufferedRWPair___init__(PyObject *self, PyObject *args, PyObject *kwargs) } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(PyTuple_GET_ITEM(args, 2)); + PyObject *iobj = _PyNumber_Index(PyTuple_GET_ITEM(args, 2)); if (iobj != NULL) { 
ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -621,7 +621,7 @@ _io_BufferedRandom___init__(PyObject *self, PyObject *args, PyObject *kwargs) } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(fastargs[1]); + PyObject *iobj = _PyNumber_Index(fastargs[1]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -637,4 +637,4 @@ _io_BufferedRandom___init__(PyObject *self, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=1882bb497ddc9375 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=98ccf7610c0e82ba input=a9049054013a1b77]*/ diff --git a/Modules/_io/clinic/bytesio.c.h b/Modules/_io/clinic/bytesio.c.h index 4720bdd655586..5ea80ae01c14e 100644 --- a/Modules/_io/clinic/bytesio.c.h +++ b/Modules/_io/clinic/bytesio.c.h @@ -404,7 +404,7 @@ _io_BytesIO_seek(bytesio *self, PyObject *const *args, Py_ssize_t nargs) } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[0]); + PyObject *iobj = _PyNumber_Index(args[0]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -505,4 +505,4 @@ _io_BytesIO___init__(PyObject *self, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=ba0f302f16397741 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=49a32140eb8c5555 input=a9049054013a1b77]*/ diff --git a/Modules/_io/clinic/iobase.c.h b/Modules/_io/clinic/iobase.c.h index 02a2ab86c4e37..4fd6e18c4efd2 100644 --- a/Modules/_io/clinic/iobase.c.h +++ b/Modules/_io/clinic/iobase.c.h @@ -276,7 +276,7 @@ _io__RawIOBase_read(PyObject *self, PyObject *const *args, Py_ssize_t nargs) } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[0]); + PyObject *iobj = _PyNumber_Index(args[0]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -310,4 +310,4 @@ _io__RawIOBase_readall(PyObject *self, PyObject *Py_UNUSED(ignored)) { return _io__RawIOBase_readall_impl(self); } -/*[clinic end 
generated code: output=1f9ce590549593be input=a9049054013a1b77]*/ +/*[clinic end generated code: output=83c1361a7a51ca84 input=a9049054013a1b77]*/ diff --git a/Modules/_io/clinic/stringio.c.h b/Modules/_io/clinic/stringio.c.h index 2b32319f985fc..147ef668a0b09 100644 --- a/Modules/_io/clinic/stringio.c.h +++ b/Modules/_io/clinic/stringio.c.h @@ -179,7 +179,7 @@ _io_StringIO_seek(stringio *self, PyObject *const *args, Py_ssize_t nargs) } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[0]); + PyObject *iobj = _PyNumber_Index(args[0]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -338,4 +338,4 @@ _io_StringIO_seekable(stringio *self, PyObject *Py_UNUSED(ignored)) { return _io_StringIO_seekable_impl(self); } -/*[clinic end generated code: output=9c428b2942d54991 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=eea93dcab10d0a97 input=a9049054013a1b77]*/ diff --git a/Modules/_io/clinic/textio.c.h b/Modules/_io/clinic/textio.c.h index f0ad69ce33238..0b047ac0aab4e 100644 --- a/Modules/_io/clinic/textio.c.h +++ b/Modules/_io/clinic/textio.c.h @@ -452,7 +452,7 @@ _io_TextIOWrapper_readline(textio *self, PyObject *const *args, Py_ssize_t nargs } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[0]); + PyObject *iobj = _PyNumber_Index(args[0]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -671,4 +671,4 @@ _io_TextIOWrapper_close(textio *self, PyObject *Py_UNUSED(ignored)) { return _io_TextIOWrapper_close_impl(self); } -/*[clinic end generated code: output=ea96ee1eb3a71f77 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=2604c8f3a45b9a03 input=a9049054013a1b77]*/ diff --git a/Modules/_struct.c b/Modules/_struct.c index f759f0b169418..81cdbb9b817d3 100644 --- a/Modules/_struct.c +++ b/Modules/_struct.c @@ -121,7 +121,7 @@ get_pylong(PyObject *v) if (!PyLong_Check(v)) { /* Not an integer; try to use __index__ to convert. 
*/ if (PyIndex_Check(v)) { - v = PyNumber_Index(v); + v = _PyNumber_Index(v); if (v == NULL) return NULL; } diff --git a/Modules/arraymodule.c b/Modules/arraymodule.c index 4c3ddc3ac2457..8f12c61646335 100644 --- a/Modules/arraymodule.c +++ b/Modules/arraymodule.c @@ -344,7 +344,7 @@ II_setitem(arrayobject *ap, Py_ssize_t i, PyObject *v) int do_decref = 0; /* if nb_int was called */ if (!PyLong_Check(v)) { - v = PyNumber_Index(v); + v = _PyNumber_Index(v); if (NULL == v) { return -1; } @@ -404,7 +404,7 @@ LL_setitem(arrayobject *ap, Py_ssize_t i, PyObject *v) int do_decref = 0; /* if nb_int was called */ if (!PyLong_Check(v)) { - v = PyNumber_Index(v); + v = _PyNumber_Index(v); if (NULL == v) { return -1; } @@ -457,7 +457,7 @@ QQ_setitem(arrayobject *ap, Py_ssize_t i, PyObject *v) int do_decref = 0; /* if nb_int was called */ if (!PyLong_Check(v)) { - v = PyNumber_Index(v); + v = _PyNumber_Index(v); if (NULL == v) { return -1; } diff --git a/Modules/clinic/_bisectmodule.c.h b/Modules/clinic/_bisectmodule.c.h index 8a0170a2c7f56..07fc9060d1d8f 100644 --- a/Modules/clinic/_bisectmodule.c.h +++ b/Modules/clinic/_bisectmodule.c.h @@ -48,7 +48,7 @@ _bisect_bisect_right(PyObject *module, PyObject *const *args, Py_ssize_t nargs, if (args[2]) { { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[2]); + PyObject *iobj = _PyNumber_Index(args[2]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -119,7 +119,7 @@ _bisect_insort_right(PyObject *module, PyObject *const *args, Py_ssize_t nargs, if (args[2]) { { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[2]); + PyObject *iobj = _PyNumber_Index(args[2]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -189,7 +189,7 @@ _bisect_bisect_left(PyObject *module, PyObject *const *args, Py_ssize_t nargs, P if (args[2]) { { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[2]); + PyObject *iobj = _PyNumber_Index(args[2]); if (iobj != NULL) { ival = 
PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -260,7 +260,7 @@ _bisect_insort_left(PyObject *module, PyObject *const *args, Py_ssize_t nargs, P if (args[2]) { { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[2]); + PyObject *iobj = _PyNumber_Index(args[2]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -283,4 +283,4 @@ _bisect_insort_left(PyObject *module, PyObject *const *args, Py_ssize_t nargs, P exit: return return_value; } -/*[clinic end generated code: output=e9097a9acd10b13f input=a9049054013a1b77]*/ +/*[clinic end generated code: output=6cf46f205659f01a input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_bz2module.c.h b/Modules/clinic/_bz2module.c.h index ab610a141e300..466020787449e 100644 --- a/Modules/clinic/_bz2module.c.h +++ b/Modules/clinic/_bz2module.c.h @@ -159,7 +159,7 @@ _bz2_BZ2Decompressor_decompress(BZ2Decompressor *self, PyObject *const *args, Py } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[1]); + PyObject *iobj = _PyNumber_Index(args[1]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -210,4 +210,4 @@ _bz2_BZ2Decompressor___init__(PyObject *self, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=c69a7de8e26c2ad1 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=b49102ee26928a28 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_collectionsmodule.c.h b/Modules/clinic/_collectionsmodule.c.h index 0cc1466549d72..7e18aeb312c4a 100644 --- a/Modules/clinic/_collectionsmodule.c.h +++ b/Modules/clinic/_collectionsmodule.c.h @@ -52,7 +52,7 @@ tuplegetter_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(PyTuple_GET_ITEM(args, 0)); + PyObject *iobj = _PyNumber_Index(PyTuple_GET_ITEM(args, 0)); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -68,4 +68,4 @@ tuplegetter_new(PyTypeObject *type, PyObject *args, 
PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=439d77631a056b4d input=a9049054013a1b77]*/ +/*[clinic end generated code: output=947186d369f50f1e input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_elementtree.c.h b/Modules/clinic/_elementtree.c.h index 5934218a52b95..2c9ba47823bf4 100644 --- a/Modules/clinic/_elementtree.c.h +++ b/Modules/clinic/_elementtree.c.h @@ -432,7 +432,7 @@ _elementtree_Element_insert(ElementObject *self, PyObject *const *args, Py_ssize } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[0]); + PyObject *iobj = _PyNumber_Index(args[0]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -915,4 +915,4 @@ _elementtree_XMLParser__setevents(XMLParserObject *self, PyObject *const *args, exit: return return_value; } -/*[clinic end generated code: output=c98b210c525a9338 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=1385b5e5688f3614 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_hashopenssl.c.h b/Modules/clinic/_hashopenssl.c.h index 51ae2402896c1..e72b55885fe1c 100644 --- a/Modules/clinic/_hashopenssl.c.h +++ b/Modules/clinic/_hashopenssl.c.h @@ -94,7 +94,7 @@ EVPXOF_digest(EVPobject *self, PyObject *const *args, Py_ssize_t nargs, PyObject } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[0]); + PyObject *iobj = _PyNumber_Index(args[0]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -141,7 +141,7 @@ EVPXOF_hexdigest(EVPobject *self, PyObject *const *args, Py_ssize_t nargs, PyObj } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[0]); + PyObject *iobj = _PyNumber_Index(args[0]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -1417,4 +1417,4 @@ _hashlib_compare_digest(PyObject *module, PyObject *const *args, Py_ssize_t narg #ifndef _HASHLIB_GET_FIPS_MODE_METHODDEF #define _HASHLIB_GET_FIPS_MODE_METHODDEF #endif /* !defined(_HASHLIB_GET_FIPS_MODE_METHODDEF) */ -/*[clinic end 
generated code: output=95447a60132f039e input=a9049054013a1b77]*/ +/*[clinic end generated code: output=2bbd6159493f44ea input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_lzmamodule.c.h b/Modules/clinic/_lzmamodule.c.h index 40913ef295489..e4e0a7945a8fb 100644 --- a/Modules/clinic/_lzmamodule.c.h +++ b/Modules/clinic/_lzmamodule.c.h @@ -118,7 +118,7 @@ _lzma_LZMADecompressor_decompress(Decompressor *self, PyObject *const *args, Py_ } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[1]); + PyObject *iobj = _PyNumber_Index(args[1]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -319,4 +319,4 @@ _lzma__decode_filter_properties(PyObject *module, PyObject *const *args, Py_ssiz return return_value; } -/*[clinic end generated code: output=a87074ca902bd432 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=d6e997ebc269f78f input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_operator.c.h b/Modules/clinic/_operator.c.h index 2a66f8ff1a542..34b6fdadfb730 100644 --- a/Modules/clinic/_operator.c.h +++ b/Modules/clinic/_operator.c.h @@ -1426,7 +1426,7 @@ _operator_length_hint(PyObject *module, PyObject *const *args, Py_ssize_t nargs) } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[1]); + PyObject *iobj = _PyNumber_Index(args[1]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -1486,4 +1486,4 @@ _operator__compare_digest(PyObject *module, PyObject *const *args, Py_ssize_t na exit: return return_value; } -/*[clinic end generated code: output=1fe4adf4f5761420 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=eae5d08f971a65fd input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_sre.c.h b/Modules/clinic/_sre.c.h index fc3ae04792f88..2314114a1bc80 100644 --- a/Modules/clinic/_sre.c.h +++ b/Modules/clinic/_sre.c.h @@ -193,7 +193,7 @@ _sre_SRE_Pattern_match(PatternObject *self, PyObject *const *args, Py_ssize_t na if (args[1]) { { Py_ssize_t ival = -1; - PyObject *iobj = 
PyNumber_Index(args[1]); + PyObject *iobj = _PyNumber_Index(args[1]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -209,7 +209,7 @@ _sre_SRE_Pattern_match(PatternObject *self, PyObject *const *args, Py_ssize_t na } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[2]); + PyObject *iobj = _PyNumber_Index(args[2]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -262,7 +262,7 @@ _sre_SRE_Pattern_fullmatch(PatternObject *self, PyObject *const *args, Py_ssize_ if (args[1]) { { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[1]); + PyObject *iobj = _PyNumber_Index(args[1]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -278,7 +278,7 @@ _sre_SRE_Pattern_fullmatch(PatternObject *self, PyObject *const *args, Py_ssize_ } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[2]); + PyObject *iobj = _PyNumber_Index(args[2]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -333,7 +333,7 @@ _sre_SRE_Pattern_search(PatternObject *self, PyObject *const *args, Py_ssize_t n if (args[1]) { { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[1]); + PyObject *iobj = _PyNumber_Index(args[1]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -349,7 +349,7 @@ _sre_SRE_Pattern_search(PatternObject *self, PyObject *const *args, Py_ssize_t n } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[2]); + PyObject *iobj = _PyNumber_Index(args[2]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -402,7 +402,7 @@ _sre_SRE_Pattern_findall(PatternObject *self, PyObject *const *args, Py_ssize_t if (args[1]) { { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[1]); + PyObject *iobj = _PyNumber_Index(args[1]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -418,7 +418,7 @@ _sre_SRE_Pattern_findall(PatternObject *self, PyObject *const *args, Py_ssize_t } { Py_ssize_t ival 
= -1; - PyObject *iobj = PyNumber_Index(args[2]); + PyObject *iobj = _PyNumber_Index(args[2]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -473,7 +473,7 @@ _sre_SRE_Pattern_finditer(PatternObject *self, PyObject *const *args, Py_ssize_t if (args[1]) { { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[1]); + PyObject *iobj = _PyNumber_Index(args[1]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -489,7 +489,7 @@ _sre_SRE_Pattern_finditer(PatternObject *self, PyObject *const *args, Py_ssize_t } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[2]); + PyObject *iobj = _PyNumber_Index(args[2]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -541,7 +541,7 @@ _sre_SRE_Pattern_scanner(PatternObject *self, PyObject *const *args, Py_ssize_t if (args[1]) { { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[1]); + PyObject *iobj = _PyNumber_Index(args[1]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -557,7 +557,7 @@ _sre_SRE_Pattern_scanner(PatternObject *self, PyObject *const *args, Py_ssize_t } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[2]); + PyObject *iobj = _PyNumber_Index(args[2]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -608,7 +608,7 @@ _sre_SRE_Pattern_split(PatternObject *self, PyObject *const *args, Py_ssize_t na } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[1]); + PyObject *iobj = _PyNumber_Index(args[1]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -661,7 +661,7 @@ _sre_SRE_Pattern_sub(PatternObject *self, PyObject *const *args, Py_ssize_t narg } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[2]); + PyObject *iobj = _PyNumber_Index(args[2]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -714,7 +714,7 @@ _sre_SRE_Pattern_subn(PatternObject *self, PyObject *const *args, Py_ssize_t nar } { 
Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[2]); + PyObject *iobj = _PyNumber_Index(args[2]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -800,7 +800,7 @@ _sre_compile(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject code = args[2]; { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[3]); + PyObject *iobj = _PyNumber_Index(args[3]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -1102,4 +1102,4 @@ _sre_SRE_Scanner_search(ScannerObject *self, PyObject *Py_UNUSED(ignored)) { return _sre_SRE_Scanner_search_impl(self); } -/*[clinic end generated code: output=7a3360917b40a808 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=0e27915b1eb7c0e4 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_struct.c.h b/Modules/clinic/_struct.c.h index 874f30a445008..1cfaef3b7131e 100644 --- a/Modules/clinic/_struct.c.h +++ b/Modules/clinic/_struct.c.h @@ -126,7 +126,7 @@ Struct_unpack_from(PyStructObject *self, PyObject *const *args, Py_ssize_t nargs } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[1]); + PyObject *iobj = _PyNumber_Index(args[1]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -312,7 +312,7 @@ unpack_from(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[2]); + PyObject *iobj = _PyNumber_Index(args[2]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -376,4 +376,4 @@ iter_unpack(PyObject *module, PyObject *const *args, Py_ssize_t nargs) return return_value; } -/*[clinic end generated code: output=1205daf7f616f0cf input=a9049054013a1b77]*/ +/*[clinic end generated code: output=8089792d8ed0c1be input=a9049054013a1b77]*/ diff --git a/Modules/clinic/arraymodule.c.h b/Modules/clinic/arraymodule.c.h index 334db39db16ea..300cd1397101e 100644 --- a/Modules/clinic/arraymodule.c.h +++ b/Modules/clinic/arraymodule.c.h @@ 
-84,7 +84,7 @@ array_array_pop(arrayobject *self, PyObject *const *args, Py_ssize_t nargs) } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[0]); + PyObject *iobj = _PyNumber_Index(args[0]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -134,7 +134,7 @@ array_array_insert(arrayobject *self, PyObject *const *args, Py_ssize_t nargs) } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[0]); + PyObject *iobj = _PyNumber_Index(args[0]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -245,7 +245,7 @@ array_array_fromfile(arrayobject *self, PyObject *const *args, Py_ssize_t nargs) f = args[0]; { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[1]); + PyObject *iobj = _PyNumber_Index(args[1]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -514,4 +514,4 @@ PyDoc_STRVAR(array_arrayiterator___setstate____doc__, #define ARRAY_ARRAYITERATOR___SETSTATE___METHODDEF \ {"__setstate__", (PyCFunction)array_arrayiterator___setstate__, METH_O, array_arrayiterator___setstate____doc__}, -/*[clinic end generated code: output=c953eb8486c7c8da input=a9049054013a1b77]*/ +/*[clinic end generated code: output=91c1cded65a1285f input=a9049054013a1b77]*/ diff --git a/Modules/clinic/audioop.c.h b/Modules/clinic/audioop.c.h index 56d31d3d721eb..da12bd17b5a60 100644 --- a/Modules/clinic/audioop.c.h +++ b/Modules/clinic/audioop.c.h @@ -39,7 +39,7 @@ audioop_getsample(PyObject *module, PyObject *const *args, Py_ssize_t nargs) } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[2]); + PyObject *iobj = _PyNumber_Index(args[2]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -372,7 +372,7 @@ audioop_findmax(PyObject *module, PyObject *const *args, Py_ssize_t nargs) } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[1]); + PyObject *iobj = _PyNumber_Index(args[1]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -1309,4 
+1309,4 @@ audioop_adpcm2lin(PyObject *module, PyObject *const *args, Py_ssize_t nargs) return return_value; } -/*[clinic end generated code: output=343e5ae478fc0359 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=840f8c315ebd4946 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/itertoolsmodule.c.h b/Modules/clinic/itertoolsmodule.c.h index c6d6717f89e3c..c1192bbcb0d79 100644 --- a/Modules/clinic/itertoolsmodule.c.h +++ b/Modules/clinic/itertoolsmodule.c.h @@ -172,7 +172,7 @@ itertools_tee(PyObject *module, PyObject *const *args, Py_ssize_t nargs) } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[1]); + PyObject *iobj = _PyNumber_Index(args[1]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -353,7 +353,7 @@ itertools_combinations(PyTypeObject *type, PyObject *args, PyObject *kwargs) iterable = fastargs[0]; { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(fastargs[1]); + PyObject *iobj = _PyNumber_Index(fastargs[1]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -401,7 +401,7 @@ itertools_combinations_with_replacement(PyTypeObject *type, PyObject *args, PyOb iterable = fastargs[0]; { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(fastargs[1]); + PyObject *iobj = _PyNumber_Index(fastargs[1]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -627,4 +627,4 @@ itertools_count(PyTypeObject *type, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=07211f86c4153050 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=d7f58dc477814b45 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/posixmodule.c.h b/Modules/clinic/posixmodule.c.h index 96ea02035ea41..ff439ee47c393 100644 --- a/Modules/clinic/posixmodule.c.h +++ b/Modules/clinic/posixmodule.c.h @@ -4743,7 +4743,7 @@ os_read(PyObject *module, PyObject *const *args, Py_ssize_t nargs) } { Py_ssize_t ival = -1; - PyObject *iobj = 
PyNumber_Index(args[1]); + PyObject *iobj = _PyNumber_Index(args[1]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -4843,7 +4843,7 @@ os_pread(PyObject *module, PyObject *const *args, Py_ssize_t nargs) } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[1]); + PyObject *iobj = _PyNumber_Index(args[1]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -5112,7 +5112,7 @@ os_sendfile(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[3]); + PyObject *iobj = _PyNumber_Index(args[3]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -5192,7 +5192,7 @@ os_sendfile(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject offobj = args[2]; { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[3]); + PyObject *iobj = _PyNumber_Index(args[3]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -5627,7 +5627,7 @@ os_copy_file_range(PyObject *module, PyObject *const *args, Py_ssize_t nargs, Py } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[2]); + PyObject *iobj = _PyNumber_Index(args[2]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -7512,7 +7512,7 @@ os_urandom(PyObject *module, PyObject *arg) { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(arg); + PyObject *iobj = _PyNumber_Index(arg); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -8160,7 +8160,7 @@ os_getrandom(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[0]); + PyObject *iobj = _PyNumber_Index(args[0]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -8877,4 +8877,4 @@ os_waitstatus_to_exitcode(PyObject *module, PyObject *const *args, Py_ssize_t na #ifndef OS_WAITSTATUS_TO_EXITCODE_METHODDEF #define OS_WAITSTATUS_TO_EXITCODE_METHODDEF 
#endif /* !defined(OS_WAITSTATUS_TO_EXITCODE_METHODDEF) */ -/*[clinic end generated code: output=9623b9e6f3809842 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=767780ea3beacf34 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/zlibmodule.c.h b/Modules/clinic/zlibmodule.c.h index 2b72aeb8df56f..61dfa9a87b5fb 100644 --- a/Modules/clinic/zlibmodule.c.h +++ b/Modules/clinic/zlibmodule.c.h @@ -117,7 +117,7 @@ zlib_decompress(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[2]); + PyObject *iobj = _PyNumber_Index(args[2]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -409,7 +409,7 @@ zlib_Decompress_decompress(compobject *self, PyObject *const *args, Py_ssize_t n } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[1]); + PyObject *iobj = _PyNumber_Index(args[1]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -611,7 +611,7 @@ zlib_Decompress_flush(compobject *self, PyObject *const *args, Py_ssize_t nargs) } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[0]); + PyObject *iobj = _PyNumber_Index(args[0]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -757,4 +757,4 @@ zlib_crc32(PyObject *module, PyObject *const *args, Py_ssize_t nargs) #ifndef ZLIB_DECOMPRESS___DEEPCOPY___METHODDEF #define ZLIB_DECOMPRESS___DEEPCOPY___METHODDEF #endif /* !defined(ZLIB_DECOMPRESS___DEEPCOPY___METHODDEF) */ -/*[clinic end generated code: output=06b6438506aab0cb input=a9049054013a1b77]*/ +/*[clinic end generated code: output=be34f273564e39a8 input=a9049054013a1b77]*/ diff --git a/Modules/mathmodule.c b/Modules/mathmodule.c index 5b96631d633c1..cb05ce7c50962 100644 --- a/Modules/mathmodule.c +++ b/Modules/mathmodule.c @@ -844,7 +844,7 @@ math_gcd(PyObject *module, PyObject * const *args, Py_ssize_t nargs) return res; } for (i = 1; i < nargs; i++) { - x = PyNumber_Index(args[i]); + x = 
_PyNumber_Index(args[i]); if (x == NULL) { Py_DECREF(res); return NULL; @@ -1723,7 +1723,7 @@ math_isqrt(PyObject *module, PyObject *n) uint64_t m, u; PyObject *a = NULL, *b; - n = PyNumber_Index(n); + n = _PyNumber_Index(n); if (n == NULL) { return NULL; } @@ -3103,24 +3103,11 @@ math_perm_impl(PyObject *module, PyObject *n, PyObject *k) if (n == NULL) { return NULL; } - if (!PyLong_CheckExact(n)) { - Py_SETREF(n, _PyLong_Copy((PyLongObject *)n)); - if (n == NULL) { - return NULL; - } - } k = PyNumber_Index(k); if (k == NULL) { Py_DECREF(n); return NULL; } - if (!PyLong_CheckExact(k)) { - Py_SETREF(k, _PyLong_Copy((PyLongObject *)k)); - if (k == NULL) { - Py_DECREF(n); - return NULL; - } - } if (Py_SIZE(n) < 0) { PyErr_SetString(PyExc_ValueError, @@ -3226,24 +3213,11 @@ math_comb_impl(PyObject *module, PyObject *n, PyObject *k) if (n == NULL) { return NULL; } - if (!PyLong_CheckExact(n)) { - Py_SETREF(n, _PyLong_Copy((PyLongObject *)n)); - if (n == NULL) { - return NULL; - } - } k = PyNumber_Index(k); if (k == NULL) { Py_DECREF(n); return NULL; } - if (!PyLong_CheckExact(k)) { - Py_SETREF(k, _PyLong_Copy((PyLongObject *)k)); - if (k == NULL) { - Py_DECREF(n); - return NULL; - } - } if (Py_SIZE(n) < 0) { PyErr_SetString(PyExc_ValueError, diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index 47ae7a8c22ac2..747184415e8bc 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -531,7 +531,7 @@ _Py_Uid_Converter(PyObject *obj, void *p) long result; unsigned long uresult; - index = PyNumber_Index(obj); + index = _PyNumber_Index(obj); if (index == NULL) { PyErr_Format(PyExc_TypeError, "uid should be integer, not %.200s", @@ -637,7 +637,7 @@ _Py_Gid_Converter(PyObject *obj, void *p) long result; unsigned long uresult; - index = PyNumber_Index(obj); + index = _PyNumber_Index(obj); if (index == NULL) { PyErr_Format(PyExc_TypeError, "gid should be integer, not %.200s", @@ -771,7 +771,7 @@ _fd_converter(PyObject *o, int *p) int overflow; long long_value; 
- PyObject *index = PyNumber_Index(o); + PyObject *index = _PyNumber_Index(o); if (index == NULL) { return 0; } diff --git a/Objects/abstract.c b/Objects/abstract.c index e8198492c63e5..973c43fe7fda9 100644 --- a/Objects/abstract.c +++ b/Objects/abstract.c @@ -1317,11 +1317,12 @@ PyIndex_Check(PyObject *obj) /* Return a Python int from the object item. + Can return an instance of int subclass. Raise TypeError if the result is not an int or if the object cannot be interpreted as an index. */ PyObject * -PyNumber_Index(PyObject *item) +_PyNumber_Index(PyObject *item) { PyObject *result = NULL; if (item == NULL) { @@ -1360,6 +1361,20 @@ PyNumber_Index(PyObject *item) return result; } +/* Return an exact Python int from the object item. + Raise TypeError if the result is not an int + or if the object cannot be interpreted as an index. +*/ +PyObject * +PyNumber_Index(PyObject *item) +{ + PyObject *result = _PyNumber_Index(item); + if (result != NULL && !PyLong_CheckExact(result)) { + Py_SETREF(result, _PyLong_Copy((PyLongObject *)result)); + } + return result; +} + /* Return an error on Overflow only if err is not NULL*/ Py_ssize_t @@ -1367,7 +1382,7 @@ PyNumber_AsSsize_t(PyObject *item, PyObject *err) { Py_ssize_t result; PyObject *runerr; - PyObject *value = PyNumber_Index(item); + PyObject *value = _PyNumber_Index(item); if (value == NULL) return -1; @@ -1451,11 +1466,7 @@ PyNumber_Long(PyObject *o) return result; } if (m && m->nb_index) { - result = PyNumber_Index(o); - if (result != NULL && !PyLong_CheckExact(result)) { - Py_SETREF(result, _PyLong_Copy((PyLongObject *)result)); - } - return result; + return PyNumber_Index(o); } trunc_func = _PyObject_LookupSpecial(o, &PyId___trunc__); if (trunc_func) { @@ -1479,9 +1490,6 @@ PyNumber_Long(PyObject *o) return NULL; } Py_SETREF(result, PyNumber_Index(result)); - if (result != NULL && !PyLong_CheckExact(result)) { - Py_SETREF(result, _PyLong_Copy((PyLongObject *)result)); - } return result; } if (PyErr_Occurred()) @@ 
-1564,7 +1572,7 @@ PyNumber_Float(PyObject *o) return PyFloat_FromDouble(val); } if (m && m->nb_index) { - PyObject *res = PyNumber_Index(o); + PyObject *res = _PyNumber_Index(o); if (!res) { return NULL; } @@ -1590,7 +1598,7 @@ PyNumber_ToBase(PyObject *n, int base) "PyNumber_ToBase: base must be 2, 8, 10 or 16"); return NULL; } - PyObject *index = PyNumber_Index(n); + PyObject *index = _PyNumber_Index(n); if (!index) return NULL; PyObject *res = _PyLong_Format(index, base); diff --git a/Objects/bytesobject.c b/Objects/bytesobject.c index 25d9814dd6d8b..8d6454059ef88 100644 --- a/Objects/bytesobject.c +++ b/Objects/bytesobject.c @@ -455,7 +455,7 @@ formatlong(PyObject *v, int flags, int prec, int type) if (PyNumber_Check(v)) { /* make sure number is a type of integer for o, x, and X */ if (type == 'o' || type == 'x' || type == 'X') - iobj = PyNumber_Index(v); + iobj = _PyNumber_Index(v); else iobj = PyNumber_Long(v); if (iobj == NULL) { diff --git a/Objects/clinic/bytearrayobject.c.h b/Objects/clinic/bytearrayobject.c.h index 83b0d03c56908..cbe6f20344e8c 100644 --- a/Objects/clinic/bytearrayobject.c.h +++ b/Objects/clinic/bytearrayobject.c.h @@ -270,7 +270,7 @@ bytearray_replace(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nar } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[2]); + PyObject *iobj = _PyNumber_Index(args[2]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -343,7 +343,7 @@ bytearray_split(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[1]); + PyObject *iobj = _PyNumber_Index(args[1]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -442,7 +442,7 @@ bytearray_rsplit(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t narg } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[1]); + PyObject *iobj = _PyNumber_Index(args[1]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); 
Py_DECREF(iobj); @@ -506,7 +506,7 @@ bytearray_insert(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t narg } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[0]); + PyObject *iobj = _PyNumber_Index(args[0]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -599,7 +599,7 @@ bytearray_pop(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[0]); + PyObject *iobj = _PyNumber_Index(args[0]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -1051,4 +1051,4 @@ bytearray_sizeof(PyByteArrayObject *self, PyObject *Py_UNUSED(ignored)) { return bytearray_sizeof_impl(self); } -/*[clinic end generated code: output=920748990279fb9d input=a9049054013a1b77]*/ +/*[clinic end generated code: output=0cd59180c7d5dce5 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/bytesobject.c.h b/Objects/clinic/bytesobject.c.h index c4a2d0c362611..201627eee23e5 100644 --- a/Objects/clinic/bytesobject.c.h +++ b/Objects/clinic/bytesobject.c.h @@ -48,7 +48,7 @@ bytes_split(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, PyObje } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[1]); + PyObject *iobj = _PyNumber_Index(args[1]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -199,7 +199,7 @@ bytes_rsplit(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, PyObj } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[1]); + PyObject *iobj = _PyNumber_Index(args[1]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -485,7 +485,7 @@ bytes_replace(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[2]); + PyObject *iobj = _PyNumber_Index(args[2]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -809,4 +809,4 @@ bytes_hex(PyBytesObject *self, PyObject *const *args, 
Py_ssize_t nargs, PyObject exit: return return_value; } -/*[clinic end generated code: output=a0c31faea2671a8c input=a9049054013a1b77]*/ +/*[clinic end generated code: output=dc1bc13e6990e452 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/listobject.c.h b/Objects/clinic/listobject.c.h index 82884a42b57df..01e31d76cfa17 100644 --- a/Objects/clinic/listobject.c.h +++ b/Objects/clinic/listobject.c.h @@ -26,7 +26,7 @@ list_insert(PyListObject *self, PyObject *const *args, Py_ssize_t nargs) } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[0]); + PyObject *iobj = _PyNumber_Index(args[0]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -125,7 +125,7 @@ list_pop(PyListObject *self, PyObject *const *args, Py_ssize_t nargs) } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[0]); + PyObject *iobj = _PyNumber_Index(args[0]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -352,4 +352,4 @@ list___reversed__(PyListObject *self, PyObject *Py_UNUSED(ignored)) { return list___reversed___impl(self); } -/*[clinic end generated code: output=137db7b11196b581 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=0063aad535edf62d input=a9049054013a1b77]*/ diff --git a/Objects/clinic/longobject.c.h b/Objects/clinic/longobject.c.h index d3d5c1992b3ea..7db89650aea63 100644 --- a/Objects/clinic/longobject.c.h +++ b/Objects/clinic/longobject.c.h @@ -211,7 +211,7 @@ int_to_bytes(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject * } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[0]); + PyObject *iobj = _PyNumber_Index(args[0]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -308,4 +308,4 @@ int_from_bytes(PyTypeObject *type, PyObject *const *args, Py_ssize_t nargs, PyOb exit: return return_value; } -/*[clinic end generated code: output=46d40c8aa6d420b7 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=63b8274fc784d617 
input=a9049054013a1b77]*/ diff --git a/Objects/clinic/unicodeobject.c.h b/Objects/clinic/unicodeobject.c.h index 2d81730d687cd..ecd409e84cb77 100644 --- a/Objects/clinic/unicodeobject.c.h +++ b/Objects/clinic/unicodeobject.c.h @@ -88,7 +88,7 @@ unicode_center(PyObject *self, PyObject *const *args, Py_ssize_t nargs) } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[0]); + PyObject *iobj = _PyNumber_Index(args[0]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -522,7 +522,7 @@ unicode_ljust(PyObject *self, PyObject *const *args, Py_ssize_t nargs) } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[0]); + PyObject *iobj = _PyNumber_Index(args[0]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -717,7 +717,7 @@ unicode_replace(PyObject *self, PyObject *const *args, Py_ssize_t nargs) } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[2]); + PyObject *iobj = _PyNumber_Index(args[2]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -831,7 +831,7 @@ unicode_rjust(PyObject *self, PyObject *const *args, Py_ssize_t nargs) } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[0]); + PyObject *iobj = _PyNumber_Index(args[0]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -900,7 +900,7 @@ unicode_split(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[1]); + PyObject *iobj = _PyNumber_Index(args[1]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -997,7 +997,7 @@ unicode_rsplit(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[1]); + PyObject *iobj = _PyNumber_Index(args[1]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -1193,7 +1193,7 @@ unicode_zfill(PyObject *self, PyObject *arg) { Py_ssize_t ival = -1; - PyObject *iobj 
= PyNumber_Index(arg); + PyObject *iobj = _PyNumber_Index(arg); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -1258,4 +1258,4 @@ unicode_sizeof(PyObject *self, PyObject *Py_UNUSED(ignored)) { return unicode_sizeof_impl(self); } -/*[clinic end generated code: output=ea1aff10c743be14 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=c5eb21e314da78b8 input=a9049054013a1b77]*/ diff --git a/Objects/floatobject.c b/Objects/floatobject.c index cc0ae8ce81908..868b7298a9e8d 100644 --- a/Objects/floatobject.c +++ b/Objects/floatobject.c @@ -250,7 +250,7 @@ PyFloat_AsDouble(PyObject *op) nb = Py_TYPE(op)->tp_as_number; if (nb == NULL || nb->nb_float == NULL) { if (nb && nb->nb_index) { - PyObject *res = PyNumber_Index(op); + PyObject *res = _PyNumber_Index(op); if (!res) { return -1; } diff --git a/Objects/longobject.c b/Objects/longobject.c index e040d6c87b923..4ae17c972c215 100644 --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -394,7 +394,7 @@ PyLong_AsLongAndOverflow(PyObject *vv, int *overflow) v = (PyLongObject *)vv; } else { - v = (PyLongObject *)PyNumber_Index(vv); + v = (PyLongObject *)_PyNumber_Index(vv); if (v == NULL) return -1; do_decref = 1; @@ -674,7 +674,7 @@ PyLong_AsUnsignedLongMask(PyObject *op) return _PyLong_AsUnsignedLongMask(op); } - lo = (PyLongObject *)PyNumber_Index(op); + lo = (PyLongObject *)_PyNumber_Index(op); if (lo == NULL) return (unsigned long)-1; @@ -1132,7 +1132,7 @@ PyLong_AsLongLong(PyObject *vv) v = (PyLongObject *)vv; } else { - v = (PyLongObject *)PyNumber_Index(vv); + v = (PyLongObject *)_PyNumber_Index(vv); if (v == NULL) return -1; do_decref = 1; @@ -1247,7 +1247,7 @@ PyLong_AsUnsignedLongLongMask(PyObject *op) return _PyLong_AsUnsignedLongLongMask(op); } - lo = (PyLongObject *)PyNumber_Index(op); + lo = (PyLongObject *)_PyNumber_Index(op); if (lo == NULL) return (unsigned long long)-1; @@ -1287,7 +1287,7 @@ PyLong_AsLongLongAndOverflow(PyObject *vv, int *overflow) v = (PyLongObject 
*)vv; } else { - v = (PyLongObject *)PyNumber_Index(vv); + v = (PyLongObject *)_PyNumber_Index(vv); if (v == NULL) return -1; do_decref = 1; @@ -5180,7 +5180,7 @@ long_round(PyObject *self, PyObject *args) if (o_ndigits == NULL) return long_long(self); - ndigits = PyNumber_Index(o_ndigits); + ndigits = _PyNumber_Index(o_ndigits); if (ndigits == NULL) return NULL; diff --git a/Objects/memoryobject.c b/Objects/memoryobject.c index 682bbe8a61e85..e3d3bd6a174ff 100644 --- a/Objects/memoryobject.c +++ b/Objects/memoryobject.c @@ -1578,7 +1578,7 @@ pylong_as_ld(PyObject *item) PyObject *tmp; long ld; - tmp = PyNumber_Index(item); + tmp = _PyNumber_Index(item); if (tmp == NULL) return -1; @@ -1593,7 +1593,7 @@ pylong_as_lu(PyObject *item) PyObject *tmp; unsigned long lu; - tmp = PyNumber_Index(item); + tmp = _PyNumber_Index(item); if (tmp == NULL) return (unsigned long)-1; @@ -1608,7 +1608,7 @@ pylong_as_lld(PyObject *item) PyObject *tmp; long long lld; - tmp = PyNumber_Index(item); + tmp = _PyNumber_Index(item); if (tmp == NULL) return -1; @@ -1623,7 +1623,7 @@ pylong_as_llu(PyObject *item) PyObject *tmp; unsigned long long llu; - tmp = PyNumber_Index(item); + tmp = _PyNumber_Index(item); if (tmp == NULL) return (unsigned long long)-1; @@ -1638,7 +1638,7 @@ pylong_as_zd(PyObject *item) PyObject *tmp; Py_ssize_t zd; - tmp = PyNumber_Index(item); + tmp = _PyNumber_Index(item); if (tmp == NULL) return -1; @@ -1653,7 +1653,7 @@ pylong_as_zu(PyObject *item) PyObject *tmp; size_t zu; - tmp = PyNumber_Index(item); + tmp = _PyNumber_Index(item); if (tmp == NULL) return (size_t)-1; diff --git a/Objects/stringlib/clinic/transmogrify.h.h b/Objects/stringlib/clinic/transmogrify.h.h index 8dd7e6b5bb9e8..a5135a0cba0f2 100644 --- a/Objects/stringlib/clinic/transmogrify.h.h +++ b/Objects/stringlib/clinic/transmogrify.h.h @@ -70,7 +70,7 @@ stringlib_ljust(PyObject *self, PyObject *const *args, Py_ssize_t nargs) } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[0]); + 
PyObject *iobj = _PyNumber_Index(args[0]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -126,7 +126,7 @@ stringlib_rjust(PyObject *self, PyObject *const *args, Py_ssize_t nargs) } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[0]); + PyObject *iobj = _PyNumber_Index(args[0]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -182,7 +182,7 @@ stringlib_center(PyObject *self, PyObject *const *args, Py_ssize_t nargs) } { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[0]); + PyObject *iobj = _PyNumber_Index(args[0]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -234,7 +234,7 @@ stringlib_zfill(PyObject *self, PyObject *arg) { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(arg); + PyObject *iobj = _PyNumber_Index(arg); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -249,4 +249,4 @@ stringlib_zfill(PyObject *self, PyObject *arg) exit: return return_value; } -/*[clinic end generated code: output=cd5ecdbf1d9e849a input=a9049054013a1b77]*/ +/*[clinic end generated code: output=2d9abc7b1cffeca6 input=a9049054013a1b77]*/ diff --git a/Objects/typeobject.c b/Objects/typeobject.c index ba2a852cdda4f..1d556e96be5f2 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -6334,7 +6334,7 @@ slot_sq_length(PyObject *self) if (res == NULL) return -1; - Py_SETREF(res, PyNumber_Index(res)); + Py_SETREF(res, _PyNumber_Index(res)); if (res == NULL) return -1; diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index ea46a44bf5faa..511640438d015 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -14617,7 +14617,7 @@ mainformatlong(PyObject *v, /* make sure number is a type of integer for o, x, and X */ if (!PyLong_Check(v)) { if (type == 'o' || type == 'x' || type == 'X') { - iobj = PyNumber_Index(v); + iobj = _PyNumber_Index(v); if (iobj == NULL) { if (PyErr_ExceptionMatches(PyExc_TypeError)) goto wrongtype; diff 
--git a/Python/clinic/_warnings.c.h b/Python/clinic/_warnings.c.h index 80ed2ae8ab123..ad6b9a8e2428c 100644 --- a/Python/clinic/_warnings.c.h +++ b/Python/clinic/_warnings.c.h @@ -45,7 +45,7 @@ warnings_warn(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObjec if (args[2]) { { Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index(args[2]); + PyObject *iobj = _PyNumber_Index(args[2]); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); @@ -66,4 +66,4 @@ warnings_warn(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObjec exit: return return_value; } -/*[clinic end generated code: output=484e5ffe94edf0f0 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=eb9997fa998fdbad input=a9049054013a1b77]*/ diff --git a/Python/getargs.c b/Python/getargs.c index 63afae25494cd..524ad917cd08a 100644 --- a/Python/getargs.c +++ b/Python/getargs.c @@ -784,7 +784,7 @@ convertsimple(PyObject *arg, const char **p_format, va_list *p_va, int flags, PyObject *iobj; Py_ssize_t *p = va_arg(*p_va, Py_ssize_t *); Py_ssize_t ival = -1; - iobj = PyNumber_Index(arg); + iobj = _PyNumber_Index(arg); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); diff --git a/Tools/clinic/clinic.py b/Tools/clinic/clinic.py index 0f40e0679f007..b1bf7826ebf9f 100755 --- a/Tools/clinic/clinic.py +++ b/Tools/clinic/clinic.py @@ -3062,7 +3062,7 @@ def parse_arg(self, argname, displayname): return """ {{{{ Py_ssize_t ival = -1; - PyObject *iobj = PyNumber_Index({argname}); + PyObject *iobj = _PyNumber_Index({argname}); if (iobj != NULL) {{{{ ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); From webhook-mailer at python.org Thu May 28 09:57:54 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 28 May 2020 13:57:54 -0000 Subject: [Python-checkins] Revert "Upgrade bundled versions of pip & setuptools (#16782)" (GH-20484) Message-ID: https://github.com/python/cpython/commit/4fd4963ccce5c12f742303dab6e43818b1133c7e commit: 
4fd4963ccce5c12f742303dab6e43818b1133c7e branch: master author: Victor Stinner committer: GitHub date: 2020-05-28T15:57:49+02:00 summary: Revert "Upgrade bundled versions of pip & setuptools (#16782)" (GH-20484) This reverts commit feb0846c3a28b05b4cfbc6ab34c764957f3eff55. files: A Lib/ensurepip/_bundled/pip-19.2.3-py2.py3-none-any.whl A Lib/ensurepip/_bundled/setuptools-41.2.0-py2.py3-none-any.whl D Lib/ensurepip/_bundled/pip-20.1-py2.py3-none-any.whl D Lib/ensurepip/_bundled/setuptools-46.1.3-py3-none-any.whl D Misc/NEWS.d/next/Library/2019-10-15-23-28-11.bpo-38488.hFQNgA.rst M Lib/ensurepip/__init__.py diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py index 1ec49714c6459..545fce656fd6f 100644 --- a/Lib/ensurepip/__init__.py +++ b/Lib/ensurepip/__init__.py @@ -12,13 +12,13 @@ __all__ = ["version", "bootstrap"] -_SETUPTOOLS_VERSION = "46.1.3" +_SETUPTOOLS_VERSION = "41.2.0" -_PIP_VERSION = "20.1" +_PIP_VERSION = "19.2.3" _PROJECTS = [ - ("setuptools", _SETUPTOOLS_VERSION, 'py3'), - ("pip", _PIP_VERSION, 'py2.py3'), + ("setuptools", _SETUPTOOLS_VERSION), + ("pip", _PIP_VERSION), ] @@ -107,8 +107,8 @@ def _bootstrap(*, root=None, upgrade=False, user=False, # Put our bundled wheels into a temporary directory and construct the # additional paths that need added to sys.path additional_paths = [] - for project, version, py_tag in _PROJECTS: - wheel_name = "{}-{}-{}-none-any.whl".format(project, version, py_tag) + for project, version in _PROJECTS: + wheel_name = "{}-{}-py2.py3-none-any.whl".format(project, version) whl = resources.read_binary( _bundled, wheel_name, diff --git a/Lib/ensurepip/_bundled/pip-19.2.3-py2.py3-none-any.whl b/Lib/ensurepip/_bundled/pip-19.2.3-py2.py3-none-any.whl new file mode 100644 index 0000000000000..8118df8ac1940 Binary files /dev/null and b/Lib/ensurepip/_bundled/pip-19.2.3-py2.py3-none-any.whl differ diff --git a/Lib/ensurepip/_bundled/pip-20.1-py2.py3-none-any.whl b/Lib/ensurepip/_bundled/pip-20.1-py2.py3-none-any.whl 
deleted file mode 100644 index 925a59f4c2d8a..0000000000000 Binary files a/Lib/ensurepip/_bundled/pip-20.1-py2.py3-none-any.whl and /dev/null differ diff --git a/Lib/ensurepip/_bundled/setuptools-46.1.3-py3-none-any.whl b/Lib/ensurepip/_bundled/setuptools-41.2.0-py2.py3-none-any.whl similarity index 65% rename from Lib/ensurepip/_bundled/setuptools-46.1.3-py3-none-any.whl rename to Lib/ensurepip/_bundled/setuptools-41.2.0-py2.py3-none-any.whl index fc3f6ccf4277d..82df6f63f4ee9 100644 Binary files a/Lib/ensurepip/_bundled/setuptools-46.1.3-py3-none-any.whl and b/Lib/ensurepip/_bundled/setuptools-41.2.0-py2.py3-none-any.whl differ diff --git a/Misc/NEWS.d/next/Library/2019-10-15-23-28-11.bpo-38488.hFQNgA.rst b/Misc/NEWS.d/next/Library/2019-10-15-23-28-11.bpo-38488.hFQNgA.rst deleted file mode 100644 index 95cf2f1b5ed46..0000000000000 --- a/Misc/NEWS.d/next/Library/2019-10-15-23-28-11.bpo-38488.hFQNgA.rst +++ /dev/null @@ -1 +0,0 @@ -Update ensurepip to install pip 20.1 and setuptools 46.1.3. From webhook-mailer at python.org Thu May 28 10:08:58 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 28 May 2020 14:08:58 -0000 Subject: [Python-checkins] bpo-30064: Fix unstable asyncio "racing" socket tests (GH-20485) Message-ID: https://github.com/python/cpython/commit/84ee7e1573d166fe7a9be676813e12523b62ab24 commit: 84ee7e1573d166fe7a9be676813e12523b62ab24 branch: master author: Victor Stinner committer: GitHub date: 2020-05-28T16:08:50+02:00 summary: bpo-30064: Fix unstable asyncio "racing" socket tests (GH-20485) Skip new "racing" socket tests which fail randomly until someone fix them, to ease analysis of buildbot failures (skip tests which are known to be broken/unstable). 
files: M Lib/test/test_asyncio/test_sock_lowlevel.py diff --git a/Lib/test/test_asyncio/test_sock_lowlevel.py b/Lib/test/test_asyncio/test_sock_lowlevel.py index 5e6a90abb46cf..e0583c0419564 100644 --- a/Lib/test/test_asyncio/test_sock_lowlevel.py +++ b/Lib/test/test_asyncio/test_sock_lowlevel.py @@ -2,6 +2,8 @@ import time import asyncio import sys +import unittest + from asyncio import proactor_events from itertools import cycle, islice from test.test_asyncio import utils as test_utils @@ -232,6 +234,8 @@ async def _basetest_sock_connect_racing(self, listener, sock): # avoid touching event loop to maintain race condition time.sleep(0.01) + # FIXME: https://bugs.python.org/issue30064#msg370143 + @unittest.skipIf(True, "unstable test") def test_sock_client_racing(self): with test_utils.run_test_server() as httpd: sock = socket.socket() From webhook-mailer at python.org Thu May 28 10:24:44 2020 From: webhook-mailer at python.org (Hai Shi) Date: Thu, 28 May 2020 14:24:44 -0000 Subject: [Python-checkins] bpo-40275: Remove test.support.TESTFN_ENCODING (GH-20482) Message-ID: https://github.com/python/cpython/commit/24bddc1b3b58f6899b2d412e51b37f68536e4fe2 commit: 24bddc1b3b58f6899b2d412e51b37f68536e4fe2 branch: master author: Hai Shi committer: GitHub date: 2020-05-28T16:24:39+02:00 summary: bpo-40275: Remove test.support.TESTFN_ENCODING (GH-20482) Replace test.support.TESTFN_ENCODING with sys.getfilesystemencoding(). files: M Doc/library/test.rst M Lib/test/support/__init__.py M Lib/test/test_sax.py M Lib/test/test_unicode_file.py diff --git a/Doc/library/test.rst b/Doc/library/test.rst index 7bee6e8031a05..7580fb5e9b174 100644 --- a/Doc/library/test.rst +++ b/Doc/library/test.rst @@ -263,11 +263,6 @@ The :mod:`test.support` module defines the following constants: Set to a non-ASCII name for a temporary file. -.. data:: TESTFN_ENCODING - - Set to :func:`sys.getfilesystemencoding`. - - .. 
data:: TESTFN_UNENCODABLE Set to a filename (str type) that should not be able to be encoded by file diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index 4b87a0c574c1b..bb905bd895de8 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -785,7 +785,6 @@ def requires_lzma(reason='requires lzma'): # http://developer.apple.com/mac/library/qa/qa2001/qa1173.html import unicodedata TESTFN_UNICODE = unicodedata.normalize('NFD', TESTFN_UNICODE) -TESTFN_ENCODING = sys.getfilesystemencoding() # TESTFN_UNENCODABLE is a filename (str type) that should *not* be able to be # encoded by the filesystem encoding (in strict mode). It can be None if we @@ -798,23 +797,23 @@ def requires_lzma(reason='requires lzma'): # probability that the whole name is encodable to MBCS (issue #9819) TESTFN_UNENCODABLE = TESTFN + "-\u5171\u0141\u2661\u0363\uDC80" try: - TESTFN_UNENCODABLE.encode(TESTFN_ENCODING) + TESTFN_UNENCODABLE.encode(sys.getfilesystemencoding()) except UnicodeEncodeError: pass else: print('WARNING: The filename %r CAN be encoded by the filesystem encoding (%s). ' 'Unicode filename tests may not be effective' - % (TESTFN_UNENCODABLE, TESTFN_ENCODING)) + % (TESTFN_UNENCODABLE, sys.getfilesystemencoding())) TESTFN_UNENCODABLE = None # Mac OS X denies unencodable filenames (invalid utf-8) elif sys.platform != 'darwin': try: # ascii and utf-8 cannot encode the byte 0xff - b'\xff'.decode(TESTFN_ENCODING) + b'\xff'.decode(sys.getfilesystemencoding()) except UnicodeDecodeError: # 0xff will be encoded using the surrogate character u+DCFF TESTFN_UNENCODABLE = TESTFN \ - + b'-\xff'.decode(TESTFN_ENCODING, 'surrogateescape') + + b'-\xff'.decode(sys.getfilesystemencoding(), 'surrogateescape') else: # File system encoding (eg. ISO-8859-* encodings) can encode # the byte 0xff. Skip some unicode filename tests. 
@@ -845,7 +844,7 @@ def requires_lzma(reason='requires lzma'): b'\x81\x98', ): try: - name.decode(TESTFN_ENCODING) + name.decode(sys.getfilesystemencoding()) except UnicodeDecodeError: TESTFN_UNDECODABLE = os.fsencode(TESTFN) + name break diff --git a/Lib/test/test_sax.py b/Lib/test/test_sax.py index ce3a422b502a0..bc77103641b6f 100644 --- a/Lib/test/test_sax.py +++ b/Lib/test/test_sax.py @@ -19,6 +19,7 @@ import codecs import os.path import shutil +import sys from urllib.error import URLError import urllib.request from test import support @@ -35,7 +36,7 @@ supports_nonascii_filenames = True if not os.path.supports_unicode_filenames: try: - support.TESTFN_UNICODE.encode(support.TESTFN_ENCODING) + support.TESTFN_UNICODE.encode(sys.getfilesystemencoding()) except (UnicodeError, TypeError): # Either the file system encoding is None, or the file name # cannot be encoded in the file system encoding. diff --git a/Lib/test/test_unicode_file.py b/Lib/test/test_unicode_file.py index b16e4c5b3bd61..ed1f6cecc7856 100644 --- a/Lib/test/test_unicode_file.py +++ b/Lib/test/test_unicode_file.py @@ -2,15 +2,16 @@ # We don't test many operations on files other than # that their names can be used with Unicode characters. import os, glob, time, shutil +import sys import unicodedata import unittest from test.support import (run_unittest, rmtree, change_cwd, - TESTFN_ENCODING, TESTFN_UNICODE, TESTFN_UNENCODABLE, create_empty_file) + TESTFN_UNICODE, TESTFN_UNENCODABLE, create_empty_file) if not os.path.supports_unicode_filenames: try: - TESTFN_UNICODE.encode(TESTFN_ENCODING) + TESTFN_UNICODE.encode(sys.getfilesystemencoding()) except (UnicodeError, TypeError): # Either the file system encoding is None, or the file name # cannot be encoded in the file system encoding. 
From webhook-mailer at python.org Thu May 28 10:37:45 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 28 May 2020 14:37:45 -0000 Subject: [Python-checkins] bpo-40275: Fix test.support.threading_helper (GH-20488) Message-ID: https://github.com/python/cpython/commit/753643205a28531fd43ef36b40b86282ae6956a7 commit: 753643205a28531fd43ef36b40b86282ae6956a7 branch: master author: Victor Stinner committer: GitHub date: 2020-05-28T16:37:37+02:00 summary: bpo-40275: Fix test.support.threading_helper (GH-20488) * Add missing sys import * Get verbose and gc_collect() from test.support * Remove unused starttime variable. Issues spotted by pyflakes. files: M Lib/test/support/threading_helper.py diff --git a/Lib/test/support/threading_helper.py b/Lib/test/support/threading_helper.py index 96f7b3fcebfac..0632577cdb303 100644 --- a/Lib/test/support/threading_helper.py +++ b/Lib/test/support/threading_helper.py @@ -1,6 +1,7 @@ +import _thread import contextlib import functools -import _thread +import sys import threading import time @@ -47,7 +48,7 @@ def threading_cleanup(*original_values): values = None time.sleep(0.01) - gc_collect() + support.gc_collect() def reap_threads(func): @@ -98,7 +99,7 @@ def wait_threads_exit(timeout=None): f"(count: {count}, old count: {old_count})") raise AssertionError(msg) time.sleep(0.010) - gc_collect() + support.gc_collect() def join_thread(thread, timeout=None): @@ -124,7 +125,7 @@ def start_threads(threads, unlock=None): t.start() started.append(t) except: - if verbose: + if support.verbose: print("Can't start %d threads, only %d threads started" % (len(threads), len(started))) raise @@ -133,7 +134,7 @@ def start_threads(threads, unlock=None): try: if unlock: unlock() - endtime = starttime = time.monotonic() + endtime = time.monotonic() for timeout in range(1, 16): endtime += 60 for t in started: @@ -141,7 +142,7 @@ def start_threads(threads, unlock=None): started = [t for t in started if t.is_alive()] if not started: break 
- if verbose: + if support.verbose: print('Unable to join %d threads during a period of ' '%d minutes' % (len(started), timeout)) finally: From webhook-mailer at python.org Thu May 28 11:23:48 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 28 May 2020 15:23:48 -0000 Subject: [Python-checkins] bpo-25920: Remove socket.getaddrinfo() lock on macOS (GH-20177) Message-ID: https://github.com/python/cpython/commit/0de437de6210c2b32b09d6c47a805b23d023bd59 commit: 0de437de6210c2b32b09d6c47a805b23d023bd59 branch: master author: Victor Stinner committer: GitHub date: 2020-05-28T17:23:39+02:00 summary: bpo-25920: Remove socket.getaddrinfo() lock on macOS (GH-20177) On macOS, socket.getaddrinfo() no longer uses an internal lock to prevent race conditions when calling getaddrinfo(). getaddrinfo is thread-safe is macOS 10.5, whereas Python 3.9 requires macOS 10.6 or newer. The lock was also used on FreeBSD older than 5.3, OpenBSD older than 201311 and NetBSD older than 4. files: A Misc/NEWS.d/next/Library/2020-05-18-15-38-25.bpo-25920.PxrLY8.rst M Modules/socketmodule.c diff --git a/Misc/NEWS.d/next/Library/2020-05-18-15-38-25.bpo-25920.PxrLY8.rst b/Misc/NEWS.d/next/Library/2020-05-18-15-38-25.bpo-25920.PxrLY8.rst new file mode 100644 index 0000000000000..cc60e976286c3 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-18-15-38-25.bpo-25920.PxrLY8.rst @@ -0,0 +1,7 @@ +On macOS, when building Python for macOS 10.4 and older, which wasn't the case +for python.org macOS installer, :func:`socket.getaddrinfo` no longer uses an +internal lock to prevent race conditions when calling ``getaddrinfo()`` which +is thread-safe since macOS 10.5. Python 3.9 requires macOS 10.6 or newer. The +internal lock caused random hang on fork when another thread was calling +:func:`socket.getaddrinfo`. The lock was also used on FreeBSD older than 5.3, +OpenBSD older than 201311 and NetBSD older than 4. 
diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c index 92c246ebea76f..f60a27ebe408c 100644 --- a/Modules/socketmodule.c +++ b/Modules/socketmodule.c @@ -197,43 +197,6 @@ if_indextoname(index) -- return the corresponding interface name\n\ # define USE_GETHOSTBYNAME_LOCK #endif -/* To use __FreeBSD_version, __OpenBSD__, and __NetBSD_Version__ */ -#ifdef HAVE_SYS_PARAM_H -#include -#endif -/* On systems on which getaddrinfo() is believed to not be thread-safe, - (this includes the getaddrinfo emulation) protect access with a lock. - - getaddrinfo is thread-safe on Mac OS X 10.5 and later. Originally it was - a mix of code including an unsafe implementation from an old BSD's - libresolv. In 10.5 Apple reimplemented it as a safe IPC call to the - mDNSResponder process. 10.5 is the first be UNIX '03 certified, which - includes the requirement that getaddrinfo be thread-safe. See issue #25924. - - It's thread-safe in OpenBSD starting with 5.4, released Nov 2013: - http://www.openbsd.org/plus54.html - - It's thread-safe in NetBSD starting with 4.0, released Dec 2007: - -http://cvsweb.netbsd.org/bsdweb.cgi/src/lib/libc/net/getaddrinfo.c.diff?r1=1.82&r2=1.83 - */ -#if ((defined(__APPLE__) && \ - MAC_OS_X_VERSION_MIN_REQUIRED < MAC_OS_X_VERSION_10_5) || \ - (defined(__FreeBSD__) && __FreeBSD_version+0 < 503000) || \ - (defined(__OpenBSD__) && OpenBSD+0 < 201311) || \ - (defined(__NetBSD__) && __NetBSD_Version__+0 < 400000000) || \ - !defined(HAVE_GETADDRINFO)) -#define USE_GETADDRINFO_LOCK -#endif - -#ifdef USE_GETADDRINFO_LOCK -#define ACQUIRE_GETADDRINFO_LOCK PyThread_acquire_lock(netdb_lock, 1); -#define RELEASE_GETADDRINFO_LOCK PyThread_release_lock(netdb_lock); -#else -#define ACQUIRE_GETADDRINFO_LOCK -#define RELEASE_GETADDRINFO_LOCK -#endif - #if defined(__APPLE__) || defined(__CYGWIN__) || defined(__NetBSD__) # include #endif @@ -1061,7 +1024,7 @@ new_sockobject(SOCKET_T fd, int family, int type, int proto) /* Lock to allow python interpreter to continue, 
but only allow one thread to be in gethostbyname or getaddrinfo */ -#if defined(USE_GETHOSTBYNAME_LOCK) || defined(USE_GETADDRINFO_LOCK) +#if defined(USE_GETHOSTBYNAME_LOCK) static PyThread_type_lock netdb_lock; #endif @@ -1086,14 +1049,12 @@ setipaddr(const char *name, struct sockaddr *addr_ret, size_t addr_ret_size, int hints.ai_socktype = SOCK_DGRAM; /*dummy*/ hints.ai_flags = AI_PASSIVE; Py_BEGIN_ALLOW_THREADS - ACQUIRE_GETADDRINFO_LOCK error = getaddrinfo(NULL, "0", &hints, &res); Py_END_ALLOW_THREADS /* We assume that those thread-unsafe getaddrinfo() versions *are* safe regarding their return value, ie. that a subsequent call to getaddrinfo() does not destroy the outcome of the first call. */ - RELEASE_GETADDRINFO_LOCK if (error) { set_gaierror(error); return -1; @@ -1194,7 +1155,6 @@ setipaddr(const char *name, struct sockaddr *addr_ret, size_t addr_ret_size, int memset(&hints, 0, sizeof(hints)); hints.ai_family = af; Py_BEGIN_ALLOW_THREADS - ACQUIRE_GETADDRINFO_LOCK error = getaddrinfo(name, NULL, &hints, &res); #if defined(__digital__) && defined(__unix__) if (error == EAI_NONAME && af == AF_UNSPEC) { @@ -1205,7 +1165,6 @@ setipaddr(const char *name, struct sockaddr *addr_ret, size_t addr_ret_size, int } #endif Py_END_ALLOW_THREADS - RELEASE_GETADDRINFO_LOCK /* see comment in setipaddr() */ if (error) { set_gaierror(error); return -1; @@ -6563,10 +6522,8 @@ socket_getaddrinfo(PyObject *self, PyObject *args, PyObject* kwargs) hints.ai_protocol = protocol; hints.ai_flags = flags; Py_BEGIN_ALLOW_THREADS - ACQUIRE_GETADDRINFO_LOCK error = getaddrinfo(hptr, pptr, &hints, &res0); Py_END_ALLOW_THREADS - RELEASE_GETADDRINFO_LOCK /* see comment in setipaddr() */ if (error) { set_gaierror(error); goto err; @@ -6659,10 +6616,8 @@ socket_getnameinfo(PyObject *self, PyObject *args) hints.ai_socktype = SOCK_DGRAM; /* make numeric port happy */ hints.ai_flags = AI_NUMERICHOST; /* don't do any name resolution */ Py_BEGIN_ALLOW_THREADS - ACQUIRE_GETADDRINFO_LOCK error = 
getaddrinfo(hostp, pbuf, &hints, &res); Py_END_ALLOW_THREADS - RELEASE_GETADDRINFO_LOCK /* see comment in setipaddr() */ if (error) { set_gaierror(error); goto fail; @@ -8422,7 +8377,7 @@ PyInit__socket(void) #endif /* _MSTCPIP_ */ /* Initialize gethostbyname lock */ -#if defined(USE_GETHOSTBYNAME_LOCK) || defined(USE_GETADDRINFO_LOCK) +#if defined(USE_GETHOSTBYNAME_LOCK) netdb_lock = PyThread_allocate_lock(); #endif From webhook-mailer at python.org Thu May 28 12:26:37 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 28 May 2020 16:26:37 -0000 Subject: [Python-checkins] bpo-37878: PyThreadState_DeleteCurrent() was not removed (GH-20489) Message-ID: https://github.com/python/cpython/commit/fda7f6d61b13c68f59806db674e892fda4013348 commit: fda7f6d61b13c68f59806db674e892fda4013348 branch: master author: Victor Stinner committer: GitHub date: 2020-05-28T18:26:01+02:00 summary: bpo-37878: PyThreadState_DeleteCurrent() was not removed (GH-20489) Update What's New in Python 3.9. PyThreadState_DeleteCurrent was not removed, but excluded from the limited C API. files: M Doc/whatsnew/3.9.rst diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index 35496d7b8f5ef..a42ec09c6532f 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -829,9 +829,6 @@ Removed removed, standard :class:`bytes` objects are always used instead. (Contributed by Jon Janzen in :issue:`36409`.) -* The C function ``PyThreadState_DeleteCurrent()`` has been removed. It was not documented. - (Contributed by Joannah Nanjekye in :issue:`37878`.) - * The C function ``PyGen_NeedsFinalizing`` has been removed. It was not documented, tested, or used anywhere within CPython after the implementation of :pep:`442`. Patch by Joannah Nanjekye. @@ -1108,6 +1105,8 @@ Removed * Exclude the following functions from the limited C API: + * ``PyThreadState_DeleteCurrent()`` + (Contributed by Joannah Nanjekye in :issue:`37878`.) 
Subject: [Python-checkins] bpo-40806: Clarify that itertools.product immediately consumes its input (GH-20492)
Accordingly, + it is only useful with finite inputs.
+ - export PYTHONPATH= - source ./venv/bin/activate - bash <(curl -s https://codecov.io/bash) -y .github/codecov.yml - name: "Test code coverage (C)" From webhook-mailer at python.org Thu May 28 15:56:52 2020 From: webhook-mailer at python.org (Ruaridh Williamson) Date: Thu, 28 May 2020 19:56:52 -0000 Subject: [Python-checkins] Note the output ordering of combinatoric functions (GH-19732) Message-ID: https://github.com/python/cpython/commit/5e0ed8abc95c2e9b3da88f00b974f9621d2b79b0 commit: 5e0ed8abc95c2e9b3da88f00b974f9621d2b79b0 branch: master author: Ruaridh Williamson committer: GitHub date: 2020-05-28T12:56:43-07:00 summary: Note the output ordering of combinatoric functions (GH-19732) files: M Doc/library/itertools.rst diff --git a/Doc/library/itertools.rst b/Doc/library/itertools.rst index 3c94c4b35dc0e..107bc515a6778 100644 --- a/Doc/library/itertools.rst +++ b/Doc/library/itertools.rst @@ -199,9 +199,9 @@ loops that truncate the stream. Return *r* length subsequences of elements from the input *iterable*. - Combinations are emitted in lexicographic sort order. So, if the - input *iterable* is sorted, the combination tuples will be produced - in sorted order. + The combination tuples are emitted in lexicographic ordering according to + the order of the input *iterable*. So, if the input *iterable* is sorted, + the combination tuples will be produced in sorted order. Elements are treated as unique based on their position, not on their value. So if the input elements are unique, there will be no repeat @@ -248,9 +248,9 @@ loops that truncate the stream. Return *r* length subsequences of elements from the input *iterable* allowing individual elements to be repeated more than once. - Combinations are emitted in lexicographic sort order. So, if the - input *iterable* is sorted, the combination tuples will be produced - in sorted order. + The combination tuples are emitted in lexicographic ordering according to + the order of the input *iterable*. 
+ The permutation tuples are emitted in lexicographic ordering according to + the order of the input *iterable*. So, if the input *iterable* is sorted, + the permutation tuples will be produced in sorted order.
+ The permutation tuples are emitted in lexicographic ordering according to + the order of the input *iterable*. So, if the input *iterable* is sorted, + the permutation tuples will be produced in sorted order.
So if the input elements are unique, there will be no repeat From webhook-mailer at python.org Thu May 28 17:56:51 2020 From: webhook-mailer at python.org (Fantix King) Date: Thu, 28 May 2020 21:56:51 -0000 Subject: [Python-checkins] bpo-30064: Properly skip unstable loop.sock_connect() racing test (GH-20494) Message-ID: https://github.com/python/cpython/commit/dc4eee9e266267498a6b783a0abccc23c06f2b87 commit: dc4eee9e266267498a6b783a0abccc23c06f2b87 branch: master author: Fantix King committer: GitHub date: 2020-05-28T14:56:42-07:00 summary: bpo-30064: Properly skip unstable loop.sock_connect() racing test (GH-20494) files: M Lib/test/test_asyncio/test_sock_lowlevel.py diff --git a/Lib/test/test_asyncio/test_sock_lowlevel.py b/Lib/test/test_asyncio/test_sock_lowlevel.py index e0583c0419564..2c8ce6b657c14 100644 --- a/Lib/test/test_asyncio/test_sock_lowlevel.py +++ b/Lib/test/test_asyncio/test_sock_lowlevel.py @@ -202,6 +202,14 @@ async def recv_until(): # ProactorEventLoop could deliver hello self.assertTrue(data.endswith(b'world')) + # After the first connect attempt before the listener is ready, + # the socket needs time to "recover" to make the next connect call. + # On Linux, a second retry will do. On Windows, the waiting time is + # unpredictable; and on FreeBSD the socket may never come back + # because it's a loopback address. Here we'll just retry for a few + # times, and have to skip the test if it's not working. 
See also: + # https://stackoverflow.com/a/54437602/3316267 + # https://lists.freebsd.org/pipermail/freebsd-current/2005-May/049876.html async def _basetest_sock_connect_racing(self, listener, sock): listener.bind(('127.0.0.1', 0)) addr = listener.getsockname() @@ -212,30 +220,26 @@ async def _basetest_sock_connect_racing(self, listener, sock): task.cancel() listener.listen(1) - i = 0 - while True: + + skip_reason = "Max retries reached" + for i in range(128): try: await self.loop.sock_connect(sock, addr) - break - except ConnectionRefusedError: # on Linux we need another retry - await self.loop.sock_connect(sock, addr) - break - except OSError as e: # on Windows we need more retries - # A connect request was made on an already connected socket - if getattr(e, 'winerror', 0) == 10056: - break + except ConnectionRefusedError as e: + skip_reason = e + except OSError as e: + skip_reason = e - # https://stackoverflow.com/a/54437602/3316267 + # Retry only for this error: + # [WinError 10022] An invalid argument was supplied if getattr(e, 'winerror', 0) != 10022: - raise - i += 1 - if i >= 128: - raise # too many retries - # avoid touching event loop to maintain race condition - time.sleep(0.01) - - # FIXME: https://bugs.python.org/issue30064#msg370143 - @unittest.skipIf(True, "unstable test") + break + else: + # success + return + + self.skipTest(skip_reason) + def test_sock_client_racing(self): with test_utils.run_test_server() as httpd: sock = socket.socket() @@ -251,6 +255,8 @@ def test_sock_client_racing(self): with listener, sock: self.loop.run_until_complete(asyncio.wait_for( self._basetest_sock_send_racing(listener, sock), 10)) + + def test_sock_client_connect_racing(self): listener = socket.socket() sock = socket.socket() with listener, sock: From webhook-mailer at python.org Thu May 28 19:27:41 2020 From: webhook-mailer at python.org (Erlend Egeberg Aasland) Date: Thu, 28 May 2020 23:27:41 -0000 Subject: [Python-checkins] bpo-40784: Fix sqlite3 deterministic 
test (GH-20448) Message-ID: https://github.com/python/cpython/commit/c610d970f5373b143bf5f5900d4645e6a90fb460 commit: c610d970f5373b143bf5f5900d4645e6a90fb460 branch: master author: Erlend Egeberg Aasland committer: GitHub date: 2020-05-29T02:27:31+03:00 summary: bpo-40784: Fix sqlite3 deterministic test (GH-20448) files: M Lib/sqlite3/test/userfunctions.py diff --git a/Lib/sqlite3/test/userfunctions.py b/Lib/sqlite3/test/userfunctions.py index 9501f535c4999..c11c82e127577 100644 --- a/Lib/sqlite3/test/userfunctions.py +++ b/Lib/sqlite3/test/userfunctions.py @@ -1,8 +1,7 @@ -#-*- coding: iso-8859-1 -*- # pysqlite2/test/userfunctions.py: tests for user-defined functions and # aggregates. # -# Copyright (C) 2005-2007 Gerhard H?ring +# Copyright (C) 2005-2007 Gerhard H??ring # # This file is part of pysqlite. # @@ -158,6 +157,7 @@ def setUp(self): self.con.create_function("isblob", 1, func_isblob) self.con.create_function("islonglong", 1, func_islonglong) self.con.create_function("spam", -1, func) + self.con.execute("create table test(t text)") def tearDown(self): self.con.close() @@ -276,18 +276,36 @@ def CheckAnyArguments(self): val = cur.fetchone()[0] self.assertEqual(val, 2) + # Regarding deterministic functions: + # + # Between 3.8.3 and 3.15.0, deterministic functions were only used to + # optimize inner loops, so for those versions we can only test if the + # sqlite machinery has factored out a call or not. From 3.15.0 and onward, + # deterministic functions were permitted in WHERE clauses of partial + # indices, which allows testing based on syntax, iso. the query optimizer. 
+ @unittest.skipIf(sqlite.sqlite_version_info < (3, 8, 3), "Requires SQLite 3.8.3 or higher") def CheckFuncNonDeterministic(self): mock = unittest.mock.Mock(return_value=None) - self.con.create_function("deterministic", 0, mock, deterministic=False) - self.con.execute("select deterministic() = deterministic()") - self.assertEqual(mock.call_count, 2) - - @unittest.skipIf(sqlite.sqlite_version_info < (3, 8, 3), "deterministic parameter not supported") + self.con.create_function("nondeterministic", 0, mock, deterministic=False) + if sqlite.sqlite_version_info < (3, 15, 0): + self.con.execute("select nondeterministic() = nondeterministic()") + self.assertEqual(mock.call_count, 2) + else: + with self.assertRaises(sqlite.OperationalError): + self.con.execute("create index t on test(t) where nondeterministic() is not null") + + @unittest.skipIf(sqlite.sqlite_version_info < (3, 8, 3), "Requires SQLite 3.8.3 or higher") def CheckFuncDeterministic(self): mock = unittest.mock.Mock(return_value=None) self.con.create_function("deterministic", 0, mock, deterministic=True) - self.con.execute("select deterministic() = deterministic()") - self.assertEqual(mock.call_count, 1) + if sqlite.sqlite_version_info < (3, 15, 0): + self.con.execute("select deterministic() = deterministic()") + self.assertEqual(mock.call_count, 1) + else: + try: + self.con.execute("create index t on test(t) where deterministic() is not null") + except sqlite.OperationalError: + self.fail("Unexpected failure while creating partial index") @unittest.skipIf(sqlite.sqlite_version_info >= (3, 8, 3), "SQLite < 3.8.3 needed") def CheckFuncDeterministicNotSupported(self): From webhook-mailer at python.org Thu May 28 20:05:05 2020 From: webhook-mailer at python.org (Abhilash Raj) Date: Fri, 29 May 2020 00:05:05 -0000 Subject: [Python-checkins] bpo-39040: Fix parsing of email mime headers with whitespace between encoded-words. 
(gh-17620) Message-ID: https://github.com/python/cpython/commit/21017ed904f734be9f195ae1274eb81426a9e776 commit: 21017ed904f734be9f195ae1274eb81426a9e776 branch: master author: Abhilash Raj committer: GitHub date: 2020-05-28T20:04:59-04:00 summary: bpo-39040: Fix parsing of email mime headers with whitespace between encoded-words. (gh-17620) * bpo-39040: Fix parsing of email headers with encoded-words inside a quoted string. It is fairly common to find malformed mime headers (especially content-disposition headers) where the parameter values, instead of being encoded to RFC standards, are "encoded" by doing RFC 2047 "encoded word" encoding, and then enclosing the whole thing in quotes. The processing of these malformed headers was incorrectly leaving the spaces between encoded words in the decoded text (whitespace between adjacent encoded words is supposed to be stripped on decoding). This changeset fixes the encoded word processing inside quoted strings (bare-quoted-string) to do correct RFC 2047 decoding by stripping that whitespace. files: A Misc/NEWS.d/next/Library/2019-12-15-18-47-20.bpo-39040.tKa0Qs.rst M Lib/email/_header_value_parser.py M Lib/test/test_email/test_headerregistry.py diff --git a/Lib/email/_header_value_parser.py b/Lib/email/_header_value_parser.py index 9c55ef7fb453b..51d355fbb0abc 100644 --- a/Lib/email/_header_value_parser.py +++ b/Lib/email/_header_value_parser.py @@ -1218,12 +1218,21 @@ def get_bare_quoted_string(value): if value[0] in WSP: token, value = get_fws(value) elif value[:2] == '=?': + valid_ew = False try: token, value = get_encoded_word(value) bare_quoted_string.defects.append(errors.InvalidHeaderDefect( "encoded word inside quoted string")) + valid_ew = True except errors.HeaderParseError: token, value = get_qcontent(value) + # Collapse the whitespace between two encoded words that occur in a + # bare-quoted-string. 
+ if valid_ew and len(bare_quoted_string) > 1: + if (bare_quoted_string[-1].token_type == 'fws' and + bare_quoted_string[-2].token_type == 'encoded-word'): + bare_quoted_string[-1] = EWWhiteSpaceTerminal( + bare_quoted_string[-1], 'fws') else: token, value = get_qcontent(value) bare_quoted_string.append(token) diff --git a/Lib/test/test_email/test_headerregistry.py b/Lib/test/test_email/test_headerregistry.py index 82e121350ffbf..68bbc9561c4af 100644 --- a/Lib/test/test_email/test_headerregistry.py +++ b/Lib/test/test_email/test_headerregistry.py @@ -873,6 +873,25 @@ def content_disp_as_value(self, {'filename': 'foo'}, [errors.InvalidHeaderDefect]), + 'invalid_parameter_value_with_fws_between_ew': ( + 'attachment; filename="=?UTF-8?Q?Schulbesuchsbest=C3=A4ttigung=2E?=' + ' =?UTF-8?Q?pdf?="', + 'attachment', + {'filename': 'Schulbesuchsbest?ttigung.pdf'}, + [errors.InvalidHeaderDefect]*3, + ('attachment; filename="Schulbesuchsbest?ttigung.pdf"'), + ('Content-Disposition: attachment;\n' + ' filename*=utf-8\'\'Schulbesuchsbest%C3%A4ttigung.pdf\n'), + ), + + 'parameter_value_with_fws_between_tokens': ( + 'attachment; filename="File =?utf-8?q?Name?= With Spaces.pdf"', + 'attachment', + {'filename': 'File Name With Spaces.pdf'}, + [errors.InvalidHeaderDefect], + 'attachment; filename="File Name With Spaces.pdf"', + ('Content-Disposition: attachment; filename="File Name With Spaces.pdf"\n'), + ) } diff --git a/Misc/NEWS.d/next/Library/2019-12-15-18-47-20.bpo-39040.tKa0Qs.rst b/Misc/NEWS.d/next/Library/2019-12-15-18-47-20.bpo-39040.tKa0Qs.rst new file mode 100644 index 0000000000000..078bce22be30f --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-12-15-18-47-20.bpo-39040.tKa0Qs.rst @@ -0,0 +1,2 @@ +Fix parsing of invalid mime headers parameters by collapsing whitespace between +encoded words in a bare-quote-string. 
{'filename': 'Schulbesuchsbestättigung.pdf'}, + [errors.InvalidHeaderDefect]*3, + ('attachment; filename="Schulbesuchsbestättigung.pdf"')
(gh-17620) Message-ID: https://github.com/python/cpython/commit/6381ee077d3c69d2f947f7bf87d8ec76e0caf189 commit: 6381ee077d3c69d2f947f7bf87d8ec76e0caf189 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-29T04:43:27-07:00 summary: bpo-39040: Fix parsing of email mime headers with whitespace between encoded-words. (gh-17620) * bpo-39040: Fix parsing of email headers with encoded-words inside a quoted string. It is fairly common to find malformed mime headers (especially content-disposition headers) where the parameter values, instead of being encoded to RFC standards, are "encoded" by doing RFC 2047 "encoded word" encoding, and then enclosing the whole thing in quotes. The processing of these malformed headers was incorrectly leaving the spaces between encoded words in the decoded text (whitespace between adjacent encoded words is supposed to be stripped on decoding). This changeset fixes the encoded word processing inside quoted strings (bare-quoted-string) to do correct RFC 2047 decoding by stripping that whitespace. 
{'filename': 'Schulbesuchsbestättigung.pdf'}, + [errors.InvalidHeaderDefect]*3, + ('attachment; filename="Schulbesuchsbestättigung.pdf"')
), + + 'parameter_value_with_fws_between_tokens': ( + 'attachment; filename="File =?utf-8?q?Name?= With Spaces.pdf"', + 'attachment', + {'filename': 'File Name With Spaces.pdf'}, + [errors.InvalidHeaderDefect], + 'attachment; filename="File Name With Spaces.pdf"', + ('Content-Disposition: attachment; filename="File Name With Spaces.pdf"\n'), + ) } diff --git a/Misc/NEWS.d/next/Library/2019-12-15-18-47-20.bpo-39040.tKa0Qs.rst b/Misc/NEWS.d/next/Library/2019-12-15-18-47-20.bpo-39040.tKa0Qs.rst new file mode 100644 index 0000000000000..078bce22be30f --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-12-15-18-47-20.bpo-39040.tKa0Qs.rst @@ -0,0 +1,2 @@ +Fix parsing of invalid mime headers parameters by collapsing whitespace between +encoded words in a bare-quote-string. From webhook-mailer at python.org Fri May 29 07:43:52 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 29 May 2020 11:43:52 -0000 Subject: [Python-checkins] bpo-39040: Fix parsing of email mime headers with whitespace between encoded-words. (gh-17620) Message-ID: https://github.com/python/cpython/commit/5f977e09e8a29dbd5972ad79c4fd17a394d1857f commit: 5f977e09e8a29dbd5972ad79c4fd17a394d1857f branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-29T04:43:47-07:00 summary: bpo-39040: Fix parsing of email mime headers with whitespace between encoded-words. (gh-17620) * bpo-39040: Fix parsing of email headers with encoded-words inside a quoted string. It is fairly common to find malformed mime headers (especially content-disposition headers) where the parameter values, instead of being encoded to RFC standards, are "encoded" by doing RFC 2047 "encoded word" encoding, and then enclosing the whole thing in quotes. The processing of these malformed headers was incorrectly leaving the spaces between encoded words in the decoded text (whitespace between adjacent encoded words is supposed to be stripped on decoding). 
This changeset fixes the encoded word processing inside quoted strings (bare-quoted-string) to do correct RFC 2047 decoding by stripping that whitespace. (cherry picked from commit 21017ed904f734be9f195ae1274eb81426a9e776) Co-authored-by: Abhilash Raj files: A Misc/NEWS.d/next/Library/2019-12-15-18-47-20.bpo-39040.tKa0Qs.rst M Lib/email/_header_value_parser.py M Lib/test/test_email/test_headerregistry.py diff --git a/Lib/email/_header_value_parser.py b/Lib/email/_header_value_parser.py index 3197d49762e9a..b7c30c47b873e 100644 --- a/Lib/email/_header_value_parser.py +++ b/Lib/email/_header_value_parser.py @@ -1211,12 +1211,21 @@ def get_bare_quoted_string(value): if value[0] in WSP: token, value = get_fws(value) elif value[:2] == '=?': + valid_ew = False try: token, value = get_encoded_word(value) bare_quoted_string.defects.append(errors.InvalidHeaderDefect( "encoded word inside quoted string")) + valid_ew = True except errors.HeaderParseError: token, value = get_qcontent(value) + # Collapse the whitespace between two encoded words that occur in a + # bare-quoted-string. 
{'filename': 'Schulbesuchsbestättigung.pdf'}, + [errors.InvalidHeaderDefect]*3, + ('attachment; filename="Schulbesuchsbestättigung.pdf"')
From webhook-mailer at python.org Fri May 29 07:59:48 2020 From: webhook-mailer at python.org (Andrew Kuchling) Date: Fri, 29 May 2020 11:59:48 -0000 Subject: [Python-checkins] [3.8] bpo-25872: Fix KeyError in linecache when multithreaded (GH-18007) (GH-20092) Message-ID: https://github.com/python/cpython/commit/b86636bff4b29ce23c886df079715dd951f13a07 commit: b86636bff4b29ce23c886df079715dd951f13a07 branch: 3.8 author: Andrew Kuchling committer: GitHub date: 2020-05-29T04:59:44-07:00 summary: [3.8] bpo-25872: Fix KeyError in linecache when multithreaded (GH-18007) (GH-20092) Backporting to 3.8 and adding a NEWS item (I should have added one to the master branch -- oh well). files: A Misc/NEWS.d/next/Library/2020-05-14-13-25-36.bpo-25872.5D5538.rst M Lib/linecache.py diff --git a/Lib/linecache.py b/Lib/linecache.py index 3afcce1f0a145..c87e1807bfafa 100644 --- a/Lib/linecache.py +++ b/Lib/linecache.py @@ -73,10 +73,10 @@ def checkcache(filename=None): try: stat = os.stat(fullname) except OSError: - del cache[filename] + cache.pop(filename, None) continue if size != stat.st_size or mtime != stat.st_mtime: - del cache[filename] + cache.pop(filename, None) def updatecache(filename, module_globals=None): @@ -86,7 +86,7 @@ def updatecache(filename, module_globals=None): if filename in cache: if len(cache[filename]) != 1: - del cache[filename] + cache.pop(filename, None) if not filename or (filename.startswith('<') and filename.endswith('>')): return [] diff --git a/Misc/NEWS.d/next/Library/2020-05-14-13-25-36.bpo-25872.5D5538.rst b/Misc/NEWS.d/next/Library/2020-05-14-13-25-36.bpo-25872.5D5538.rst new file mode 100644 index 0000000000000..3fd8bac73edbe --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-14-13-25-36.bpo-25872.5D5538.rst @@ -0,0 +1,2 @@ +:mod:`linecache` could crash with a :exc:`KeyError` when accessed from multiple threads. +Fix by Michael Graczyk. 
From webhook-mailer at python.org Fri May 29 08:17:47 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 29 May 2020 12:17:47 -0000 Subject: [Python-checkins] [3.8] bpo-25872: Fix KeyError in linecache when multithreaded (GH-18007) (GH-20092) Message-ID: https://github.com/python/cpython/commit/852e8a7ed4d3d48e5c1c8120cfc932eb6a84bb8e commit: 852e8a7ed4d3d48e5c1c8120cfc932eb6a84bb8e branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-29T05:17:42-07:00 summary: [3.8] bpo-25872: Fix KeyError in linecache when multithreaded (GH-18007) (GH-20092) Backporting to 3.8 and adding a NEWS item (I should have added one to the master branch -- oh well). (cherry picked from commit b86636bff4b29ce23c886df079715dd951f13a07) Co-authored-by: Andrew Kuchling files: A Misc/NEWS.d/next/Library/2020-05-14-13-25-36.bpo-25872.5D5538.rst M Lib/linecache.py diff --git a/Lib/linecache.py b/Lib/linecache.py index 3afcce1f0a145..c87e1807bfafa 100644 --- a/Lib/linecache.py +++ b/Lib/linecache.py @@ -73,10 +73,10 @@ def checkcache(filename=None): try: stat = os.stat(fullname) except OSError: - del cache[filename] + cache.pop(filename, None) continue if size != stat.st_size or mtime != stat.st_mtime: - del cache[filename] + cache.pop(filename, None) def updatecache(filename, module_globals=None): @@ -86,7 +86,7 @@ def updatecache(filename, module_globals=None): if filename in cache: if len(cache[filename]) != 1: - del cache[filename] + cache.pop(filename, None) if not filename or (filename.startswith('<') and filename.endswith('>')): return [] diff --git a/Misc/NEWS.d/next/Library/2020-05-14-13-25-36.bpo-25872.5D5538.rst b/Misc/NEWS.d/next/Library/2020-05-14-13-25-36.bpo-25872.5D5538.rst new file mode 100644 index 0000000000000..3fd8bac73edbe --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-14-13-25-36.bpo-25872.5D5538.rst @@ -0,0 +1,2 @@ +:mod:`linecache` could crash with a :exc:`KeyError` when 
accessed from multiple threads. +Fix by Michael Graczyk. From webhook-mailer at python.org Fri May 29 08:46:40 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 29 May 2020 12:46:40 -0000 Subject: [Python-checkins] bpo-40784: Fix sqlite3 deterministic test (GH-20448) Message-ID: https://github.com/python/cpython/commit/00a240bf7f95bbd220f1cfbf9eb58484a5f9681a commit: 00a240bf7f95bbd220f1cfbf9eb58484a5f9681a branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-29T05:46:34-07:00 summary: bpo-40784: Fix sqlite3 deterministic test (GH-20448) (cherry picked from commit c610d970f5373b143bf5f5900d4645e6a90fb460) Co-authored-by: Erlend Egeberg Aasland files: M Lib/sqlite3/test/userfunctions.py diff --git a/Lib/sqlite3/test/userfunctions.py b/Lib/sqlite3/test/userfunctions.py index 9501f535c4999..c11c82e127577 100644 --- a/Lib/sqlite3/test/userfunctions.py +++ b/Lib/sqlite3/test/userfunctions.py @@ -1,8 +1,7 @@ -#-*- coding: iso-8859-1 -*- # pysqlite2/test/userfunctions.py: tests for user-defined functions and # aggregates. # -# Copyright (C) 2005-2007 Gerhard H?ring +# Copyright (C) 2005-2007 Gerhard Häring # # This file is part of pysqlite. # @@ -158,6 +157,7 @@ def setUp(self): self.con.create_function("isblob", 1, func_isblob) self.con.create_function("islonglong", 1, func_islonglong) self.con.create_function("spam", -1, func) + self.con.execute("create table test(t text)") def tearDown(self): self.con.close() @@ -276,18 +276,36 @@ def CheckAnyArguments(self): val = cur.fetchone()[0] self.assertEqual(val, 2) + # Regarding deterministic functions: + # + # Between 3.8.3 and 3.15.0, deterministic functions were only used to + # optimize inner loops, so for those versions we can only test if the + # sqlite machinery has factored out a call or not. 
From 3.15.0 and onward, + # deterministic functions were permitted in WHERE clauses of partial + # indices, which allows testing based on syntax, iso. the query optimizer. + @unittest.skipIf(sqlite.sqlite_version_info < (3, 8, 3), "Requires SQLite 3.8.3 or higher") def CheckFuncNonDeterministic(self): mock = unittest.mock.Mock(return_value=None) - self.con.create_function("deterministic", 0, mock, deterministic=False) - self.con.execute("select deterministic() = deterministic()") - self.assertEqual(mock.call_count, 2) - - @unittest.skipIf(sqlite.sqlite_version_info < (3, 8, 3), "deterministic parameter not supported") + self.con.create_function("nondeterministic", 0, mock, deterministic=False) + if sqlite.sqlite_version_info < (3, 15, 0): + self.con.execute("select nondeterministic() = nondeterministic()") + self.assertEqual(mock.call_count, 2) + else: + with self.assertRaises(sqlite.OperationalError): + self.con.execute("create index t on test(t) where nondeterministic() is not null") + + @unittest.skipIf(sqlite.sqlite_version_info < (3, 8, 3), "Requires SQLite 3.8.3 or higher") def CheckFuncDeterministic(self): mock = unittest.mock.Mock(return_value=None) self.con.create_function("deterministic", 0, mock, deterministic=True) - self.con.execute("select deterministic() = deterministic()") - self.assertEqual(mock.call_count, 1) + if sqlite.sqlite_version_info < (3, 15, 0): + self.con.execute("select deterministic() = deterministic()") + self.assertEqual(mock.call_count, 1) + else: + try: + self.con.execute("create index t on test(t) where deterministic() is not null") + except sqlite.OperationalError: + self.fail("Unexpected failure while creating partial index") @unittest.skipIf(sqlite.sqlite_version_info >= (3, 8, 3), "SQLite < 3.8.3 needed") def CheckFuncDeterministicNotSupported(self): From webhook-mailer at python.org Fri May 29 09:24:05 2020 From: webhook-mailer at python.org (Mark Dickinson) Date: Fri, 29 May 2020 13:24:05 -0000 Subject: [Python-checkins] 
bpo-40780: Fix failure of _Py_dg_dtoa to remove trailing zeros (GH-20435) Message-ID: https://github.com/python/cpython/commit/895c9c1d438367722f74f437fda96767d770662b commit: 895c9c1d438367722f74f437fda96767d770662b branch: master author: Mark Dickinson committer: GitHub date: 2020-05-29T14:23:57+01:00 summary: bpo-40780: Fix failure of _Py_dg_dtoa to remove trailing zeros (GH-20435) * Fix failure of _Py_dg_dtoa to remove trailing zeros * Add regression test and news entry * Add explanation about why it's safe to strip trailing zeros * Make code safer, clean up comments, add change note at top of file * Nitpick: avoid implicit int-to-float conversion in tests files: A Misc/NEWS.d/next/Core and Builtins/2020-05-26-17-43-58.bpo-40780.3Ckdgm.rst M Lib/test/test_format.py M Python/dtoa.c diff --git a/Lib/test/test_format.py b/Lib/test/test_format.py index 4559cd5623efe..e9e5bb9cf00a6 100644 --- a/Lib/test/test_format.py +++ b/Lib/test/test_format.py @@ -484,6 +484,17 @@ def test_precision_c_limits(self): with self.assertRaises(ValueError) as cm: format(c, ".%sf" % (INT_MAX + 1)) + def test_g_format_has_no_trailing_zeros(self): + # regression test for bugs.python.org/issue40780 + self.assertEqual("%.3g" % 1505.0, "1.5e+03") + self.assertEqual("%#.3g" % 1505.0, "1.50e+03") + + self.assertEqual(format(1505.0, ".3g"), "1.5e+03") + self.assertEqual(format(1505.0, "#.3g"), "1.50e+03") + + self.assertEqual(format(12300050.0, ".6g"), "1.23e+07") + self.assertEqual(format(12300050.0, "#.6g"), "1.23000e+07") + if __name__ == "__main__": unittest.main() diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-26-17-43-58.bpo-40780.3Ckdgm.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-26-17-43-58.bpo-40780.3Ckdgm.rst new file mode 100644 index 0000000000000..ed6020c2e2355 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-26-17-43-58.bpo-40780.3Ckdgm.rst @@ -0,0 +1,2 @@ +Fix a corner case where g-style string formatting of a float failed to +remove trailing zeros. 
diff --git a/Python/dtoa.c b/Python/dtoa.c index 822adc612962a..e629b296426f3 100644 --- a/Python/dtoa.c +++ b/Python/dtoa.c @@ -64,6 +64,9 @@ * 7. _Py_dg_strtod has been modified so that it doesn't accept strings with * leading whitespace. * + * 8. A corner case where _Py_dg_dtoa didn't strip trailing zeros has been + * fixed. (bugs.python.org/issue40780) + * ***************************************************************/ /* Please send bug reports for the original dtoa.c code to David M. Gay (dmg @@ -2563,6 +2566,14 @@ _Py_dg_dtoa(double dd, int mode, int ndigits, } ++*s++; } + else { + /* Strip trailing zeros. This branch was missing from the + original dtoa.c, leading to surplus trailing zeros in + some cases. See bugs.python.org/issue40780. */ + while (s > s0 && s[-1] == '0') { + --s; + } + } break; } } From webhook-mailer at python.org Fri May 29 09:34:38 2020 From: webhook-mailer at python.org (Paul Ganssle) Date: Fri, 29 May 2020 13:34:38 -0000 Subject: [Python-checkins] Further de-linting of zoneinfo module (#20499) Message-ID: https://github.com/python/cpython/commit/364b5ead1584583db91ef7f9d9f87f01bfbb5774 commit: 364b5ead1584583db91ef7f9d9f87f01bfbb5774 branch: master author: Paul Ganssle committer: GitHub date: 2020-05-29T09:34:30-04:00 summary: Further de-linting of zoneinfo module (#20499) * Remove unused imports in zoneinfo * Remove unused variables in zoneinfo * Remove else after raise files: M Lib/test/test_zoneinfo/test_zoneinfo.py M Lib/zoneinfo/_common.py M Lib/zoneinfo/_tzpath.py M Lib/zoneinfo/_zoneinfo.py diff --git a/Lib/test/test_zoneinfo/test_zoneinfo.py b/Lib/test/test_zoneinfo/test_zoneinfo.py index fe2c380c51442..1f1fa60f1ffc1 100644 --- a/Lib/test/test_zoneinfo/test_zoneinfo.py +++ b/Lib/test/test_zoneinfo/test_zoneinfo.py @@ -19,12 +19,7 @@ from functools import cached_property from . 
import _support as test_support -from ._support import ( - OS_ENV_LOCK, - TZPATH_LOCK, - TZPATH_TEST_LOCK, - ZoneInfoTestBase, -) +from ._support import OS_ENV_LOCK, TZPATH_TEST_LOCK, ZoneInfoTestBase py_zoneinfo, c_zoneinfo = test_support.get_modules() @@ -365,7 +360,6 @@ def test_folds_and_gaps(self): self.assertEqual(dt.dst(), offset.dst, dt) def test_folds_from_utc(self): - tests = [] for key in self.zones(): zi = self.zone_from_key(key) with self.subTest(key=key): @@ -927,7 +921,7 @@ def populate_tzstr_header(cls): # the Version 2+ file. In this case, we have no transitions, just # the tzstr in the footer, so up to the footer, the files are # identical and we can just write the same file twice in a row. - for i in range(2): + for _ in range(2): out += b"TZif" # Magic value out += b"3" # Version out += b" " * 15 # Reserved @@ -952,7 +946,6 @@ def zone_from_tzstr(self, tzstr): return self.klass.from_file(zonefile, key=tzstr) def test_tzstr_localized(self): - i = 0 for tzstr, cases in self.test_cases.items(): with self.subTest(tzstr=tzstr): zi = self.zone_from_tzstr(tzstr) diff --git a/Lib/zoneinfo/_common.py b/Lib/zoneinfo/_common.py index 3d35d4f4b463f..41c898f37e4f8 100644 --- a/Lib/zoneinfo/_common.py +++ b/Lib/zoneinfo/_common.py @@ -80,7 +80,6 @@ def load_data(fobj): # not by position in the array but by position in the unsplit # abbreviation string. I suppose this makes more sense in C, which uses # null to terminate the strings, but it's inconvenient here... 
- char_total = 0 abbr_vals = {} abbr_chars = fobj.read(charcnt) diff --git a/Lib/zoneinfo/_tzpath.py b/Lib/zoneinfo/_tzpath.py index 9e381b6e4434b..9513611c17738 100644 --- a/Lib/zoneinfo/_tzpath.py +++ b/Lib/zoneinfo/_tzpath.py @@ -12,7 +12,8 @@ def reset_tzpath(to=None): f"tzpaths must be a list or tuple, " + f"not {type(tzpaths)}: {tzpaths!r}" ) - elif not all(map(os.path.isabs, tzpaths)): + + if not all(map(os.path.isabs, tzpaths)): raise ValueError(_get_invalid_paths_message(tzpaths)) base_tzpath = tzpaths else: diff --git a/Lib/zoneinfo/_zoneinfo.py b/Lib/zoneinfo/_zoneinfo.py index b207dd346fe25..7b1718a0676e1 100644 --- a/Lib/zoneinfo/_zoneinfo.py +++ b/Lib/zoneinfo/_zoneinfo.py @@ -4,7 +4,7 @@ import functools import re import weakref -from datetime import datetime, timedelta, timezone, tzinfo +from datetime import datetime, timedelta, tzinfo from . import _common, _tzpath From webhook-mailer at python.org Fri May 29 12:28:09 2020 From: webhook-mailer at python.org (Niklas Fiekas) Date: Fri, 29 May 2020 16:28:09 -0000 Subject: [Python-checkins] bpo-29882: Add an efficient popcount method for integers (#771) Message-ID: https://github.com/python/cpython/commit/8bd216dfede9cb2d5bedb67f20a30c99844dbfb8 commit: 8bd216dfede9cb2d5bedb67f20a30c99844dbfb8 branch: master author: Niklas Fiekas committer: GitHub date: 2020-05-29T17:28:02+01:00 summary: bpo-29882: Add an efficient popcount method for integers (#771) * bpo-29882: Add an efficient popcount method for integers * Update 'sign bit' and versionadded in docs * Add entry to whatsnew document * Doc: use positive example, mention population count * Minor cleanups of the core code * Move popcount_digit closer to where it's used * Use z instead of self after conversion * Add 'absolute value' and 'population count' to docstring * Fix clinic error about missing summary line * Ensure popcount_digit is portable with 64-bit ints Co-authored-by: Mark Dickinson files: A Misc/NEWS.d/next/Core and 
Builtins/2019-06-02-11-29-15.bpo-29882.AkRzjb.rst M Doc/library/stdtypes.rst M Doc/whatsnew/3.10.rst M Lib/test/test_doctest.py M Lib/test/test_long.py M Objects/clinic/longobject.c.h M Objects/longobject.c diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst index 6a9fdcb38d24b..2082b849fd05b 100644 --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -478,6 +478,27 @@ class`. In addition, it provides a few more methods: .. versionadded:: 3.1 +.. method:: int.bit_count() + + Return the number of ones in the binary representation of the absolute + value of the integer. This is also known as the population count. + Example:: + + >>> n = 19 + >>> bin(n) + '0b10011' + >>> n.bit_count() + 3 + >>> (-n).bit_count() + 3 + + Equivalent to:: + + def bit_count(self): + return bin(self).count("1") + + .. versionadded:: 3.10 + .. method:: int.to_bytes(length, byteorder, \*, signed=False) Return an array of bytes representing an integer. diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst index 34a09fe4b505c..8a6b02179db17 100644 --- a/Doc/whatsnew/3.10.rst +++ b/Doc/whatsnew/3.10.rst @@ -70,6 +70,9 @@ Summary -- Release highlights New Features ============ +* The :class:`int` type has a new method :meth:`int.bit_count`, returning the + number of ones in the binary expansion of a given integer, also known + as the population count. (Contributed by Niklas Fiekas in :issue:`29882`.) Other Language Changes diff --git a/Lib/test/test_doctest.py b/Lib/test/test_doctest.py index 3efe5dafc20ad..8d9f872968775 100644 --- a/Lib/test/test_doctest.py +++ b/Lib/test/test_doctest.py @@ -669,7 +669,7 @@ def non_Python_modules(): r""" True >>> real_tests = [t for t in tests if len(t.examples) > 0] >>> len(real_tests) # objects that actually have doctests - 13 + 14 >>> for t in real_tests: ... print('{} {}'.format(len(t.examples), t.name)) ... 
@@ -682,6 +682,7 @@ def non_Python_modules(): r""" 1 builtins.hex 1 builtins.int 3 builtins.int.as_integer_ratio + 2 builtins.int.bit_count 2 builtins.int.bit_length 5 builtins.memoryview.hex 1 builtins.oct diff --git a/Lib/test/test_long.py b/Lib/test/test_long.py index 7ce37e8dbd6c7..c97842b5bfd23 100644 --- a/Lib/test/test_long.py +++ b/Lib/test/test_long.py @@ -1016,6 +1016,17 @@ def test_bit_length(self): self.assertEqual((a+1).bit_length(), i+1) self.assertEqual((-a-1).bit_length(), i+1) + def test_bit_count(self): + for a in range(-1000, 1000): + self.assertEqual(a.bit_count(), bin(a).count("1")) + + for exp in [10, 17, 63, 64, 65, 1009, 70234, 1234567]: + a = 2**exp + self.assertEqual(a.bit_count(), 1) + self.assertEqual((a - 1).bit_count(), exp) + self.assertEqual((a ^ 63).bit_count(), 7) + self.assertEqual(((a - 1) ^ 510).bit_count(), exp - 8) + def test_round(self): # check round-half-even algorithm. For round to nearest ten; # rounding map is invariant under adding multiples of 20 diff --git a/Misc/NEWS.d/next/Core and Builtins/2019-06-02-11-29-15.bpo-29882.AkRzjb.rst b/Misc/NEWS.d/next/Core and Builtins/2019-06-02-11-29-15.bpo-29882.AkRzjb.rst new file mode 100644 index 0000000000000..240b5680b36a2 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2019-06-02-11-29-15.bpo-29882.AkRzjb.rst @@ -0,0 +1,2 @@ +Add :meth:`int.bit_count()`, counting the number of ones in the binary +representation of an integer. Patch by Niklas Fiekas. 
diff --git a/Objects/clinic/longobject.c.h b/Objects/clinic/longobject.c.h index 7db89650aea63..cf388c50f5a6a 100644 --- a/Objects/clinic/longobject.c.h +++ b/Objects/clinic/longobject.c.h @@ -138,6 +138,31 @@ int_bit_length(PyObject *self, PyObject *Py_UNUSED(ignored)) return int_bit_length_impl(self); } +PyDoc_STRVAR(int_bit_count__doc__, +"bit_count($self, /)\n" +"--\n" +"\n" +"Number of ones in the binary representation of the absolute value of self.\n" +"\n" +"Also known as the population count.\n" +"\n" +">>> bin(13)\n" +"\'0b1101\'\n" +">>> (13).bit_count()\n" +"3"); + +#define INT_BIT_COUNT_METHODDEF \ + {"bit_count", (PyCFunction)int_bit_count, METH_NOARGS, int_bit_count__doc__}, + +static PyObject * +int_bit_count_impl(PyObject *self); + +static PyObject * +int_bit_count(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + return int_bit_count_impl(self); +} + PyDoc_STRVAR(int_as_integer_ratio__doc__, "as_integer_ratio($self, /)\n" "--\n" @@ -308,4 +333,4 @@ int_from_bytes(PyTypeObject *type, PyObject *const *args, Py_ssize_t nargs, PyOb exit: return return_value; } -/*[clinic end generated code: output=63b8274fc784d617 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=4257cfdb155efd00 input=a9049054013a1b77]*/ diff --git a/Objects/longobject.c b/Objects/longobject.c index 4ae17c972c215..0b209a403c4b7 100644 --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -5304,6 +5304,75 @@ int_bit_length_impl(PyObject *self) return NULL; } +static int +popcount_digit(digit d) +{ + /* 32bit SWAR popcount. */ + uint32_t u = d; + u -= (u >> 1) & 0x55555555U; + u = (u & 0x33333333U) + ((u >> 2) & 0x33333333U); + u = (u + (u >> 4)) & 0x0f0f0f0fU; + return (uint32_t)(u * 0x01010101U) >> 24; +} + +/*[clinic input] +int.bit_count + +Number of ones in the binary representation of the absolute value of self. + +Also known as the population count. 
+ +>>> bin(13) +'0b1101' +>>> (13).bit_count() +3 +[clinic start generated code]*/ + +static PyObject * +int_bit_count_impl(PyObject *self) +/*[clinic end generated code: output=2e571970daf1e5c3 input=7e0adef8e8ccdf2e]*/ +{ + assert(self != NULL); + assert(PyLong_Check(self)); + + PyLongObject *z = (PyLongObject *)self; + Py_ssize_t ndigits = Py_ABS(Py_SIZE(z)); + Py_ssize_t bit_count = 0; + + /* Each digit has up to PyLong_SHIFT ones, so the accumulated bit count + from the first PY_SSIZE_T_MAX/PyLong_SHIFT digits can't overflow a + Py_ssize_t. */ + Py_ssize_t ndigits_fast = Py_MIN(ndigits, PY_SSIZE_T_MAX/PyLong_SHIFT); + for (Py_ssize_t i = 0; i < ndigits_fast; i++) { + bit_count += popcount_digit(z->ob_digit[i]); + } + + PyObject *result = PyLong_FromSsize_t(bit_count); + if (result == NULL) { + return NULL; + } + + /* Use Python integers if bit_count would overflow. */ + for (Py_ssize_t i = ndigits_fast; i < ndigits; i++) { + PyObject *x = PyLong_FromLong(popcount_digit(z->ob_digit[i])); + if (x == NULL) { + goto error; + } + PyObject *y = long_add((PyLongObject *)result, (PyLongObject *)x); + Py_DECREF(x); + if (y == NULL) { + goto error; + } + Py_DECREF(result); + result = y; + } + + return result; + + error: + Py_DECREF(result); + return NULL; +} /*[clinic input] int.as_integer_ratio @@ -5460,6 +5529,7 @@ static PyMethodDef long_methods[] = { {"conjugate", long_long_meth, METH_NOARGS, "Returns self, the complex conjugate of any int."}, INT_BIT_LENGTH_METHODDEF + INT_BIT_COUNT_METHODDEF INT_TO_BYTES_METHODDEF INT_FROM_BYTES_METHODDEF INT_AS_INTEGER_RATIO_METHODDEF From webhook-mailer at python.org Fri May 29 15:35:29 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 29 May 2020 19:35:29 -0000 Subject: [Python-checkins] bpo-24048: Save the live exception during import.c's remove_module() (GH-13005) Message-ID: https://github.com/python/cpython/commit/5aa40e587e63bcad22c7e196fc3559e2b5e0792b commit: 
5aa40e587e63bcad22c7e196fc3559e2b5e0792b branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-29T12:35:21-07:00 summary: bpo-24048: Save the live exception during import.c's remove_module() (GH-13005) Save the live exception during the course of remove_module(). (cherry picked from commit 94a64e9cd411a87514b68082c1c437eb3b49dfb9) Co-authored-by: Zackery Spytz files: A Misc/NEWS.d/next/Core and Builtins/2019-04-29-03-27-22.bpo-24048.vXxUDQ.rst M Python/import.c diff --git a/Misc/NEWS.d/next/Core and Builtins/2019-04-29-03-27-22.bpo-24048.vXxUDQ.rst b/Misc/NEWS.d/next/Core and Builtins/2019-04-29-03-27-22.bpo-24048.vXxUDQ.rst new file mode 100644 index 0000000000000..27c86a47f4b92 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2019-04-29-03-27-22.bpo-24048.vXxUDQ.rst @@ -0,0 +1 @@ +Save the live exception during import.c's ``remove_module()``. diff --git a/Python/import.c b/Python/import.c index 6d014cf5b008f..d436d576eb3cd 100644 --- a/Python/import.c +++ b/Python/import.c @@ -827,14 +827,18 @@ PyImport_AddModule(const char *name) static void remove_module(PyObject *name) { + PyObject *type, *value, *traceback; + PyErr_Fetch(&type, &value, &traceback); PyObject *modules = PyImport_GetModuleDict(); + if (!PyMapping_HasKey(modules, name)) { + goto out; + } if (PyMapping_DelItem(modules, name) < 0) { - if (!PyMapping_HasKey(modules, name)) { - return; - } Py_FatalError("import: deleting existing key in " "sys.modules failed"); } +out: + PyErr_Restore(type, value, traceback); } From webhook-mailer at python.org Fri May 29 18:54:19 2020 From: webhook-mailer at python.org (Terry Jan Reedy) Date: Fri, 29 May 2020 22:54:19 -0000 Subject: [Python-checkins] bpo-39885: Make IDLE context menu cut and copy work again (GH-18951) Message-ID: https://github.com/python/cpython/commit/97e4e0f53d6690db6b942678489716a30925b8af commit: 97e4e0f53d6690db6b942678489716a30925b8af branch: master author: Terry 
Jan Reedy committer: GitHub date: 2020-05-29T18:54:14-04:00 summary: bpo-39885: Make IDLE context menu cut and copy work again (GH-18951) Leave selection when right click within. This exception to clearing selections when right-clicking was omitted from the previous commit, 4ca060d. I did not realize that this completely disabled the context menu entries, and I should have merged a minimal fix immediately. An automated test should follow. files: A Misc/NEWS.d/next/IDLE/2020-05-29-18-21-58.bpo-39885.zB_-bN.rst M Lib/idlelib/NEWS.txt M Lib/idlelib/editor.py M Lib/idlelib/idle_test/test_editor.py diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt index 709008f78a571..7982afa7d1f67 100644 --- a/Lib/idlelib/NEWS.txt +++ b/Lib/idlelib/NEWS.txt @@ -12,8 +12,9 @@ when fetching a calltip. bpo-27115: For 'Go to Line', use a Query entry box subclass with IDLE standard behavior and improved error checking. -bpo-39885: Since clicking to get an IDLE context menu moves the -cursor, any text selection should be and now is cleared. +bpo-39885: When a context menu is invoked by right-clicking outside +of a selection, clear the selection and move the cursor. Cut and +Copy require that the click be within the selection. bpo-39852: Edit "Go to line" now clears any selection, preventing accidental deletion. It also updates Ln and Col on the status bar. 
diff --git a/Lib/idlelib/editor.py b/Lib/idlelib/editor.py index b0f88b5463d1b..a178eaf93c013 100644 --- a/Lib/idlelib/editor.py +++ b/Lib/idlelib/editor.py @@ -499,15 +499,23 @@ def handle_yview(self, event, *args): rmenu = None def right_menu_event(self, event): - self.text.tag_remove("sel", "1.0", "end") - self.text.mark_set("insert", "@%d,%d" % (event.x, event.y)) + text = self.text + newdex = text.index(f'@{event.x},{event.y}') + try: + in_selection = (text.compare('sel.first', '<=', newdex) and + text.compare(newdex, '<=', 'sel.last')) + except TclError: + in_selection = False + if not in_selection: + text.tag_remove("sel", "1.0", "end") + text.mark_set("insert", newdex) if not self.rmenu: self.make_rmenu() rmenu = self.rmenu self.event = event iswin = sys.platform[:3] == 'win' if iswin: - self.text.config(cursor="arrow") + text.config(cursor="arrow") for item in self.rmenu_specs: try: @@ -520,7 +528,6 @@ def right_menu_event(self, event): state = getattr(self, verify_state)() rmenu.entryconfigure(label, state=state) - rmenu.tk_popup(event.x_root, event.y_root) if iswin: self.text.config(cursor="ibeam") diff --git a/Lib/idlelib/idle_test/test_editor.py b/Lib/idlelib/idle_test/test_editor.py index 91e8ef89d1d72..443dcf021679f 100644 --- a/Lib/idlelib/idle_test/test_editor.py +++ b/Lib/idlelib/idle_test/test_editor.py @@ -5,6 +5,7 @@ from collections import namedtuple from test.support import requires from tkinter import Tk +from idlelib.idle_test.mock_idle import Func Editor = editor.EditorWindow @@ -92,6 +93,12 @@ def test_tabwidth_8(self): ) +def insert(text, string): + text.delete('1.0', 'end') + text.insert('end', string) + text.update() # Force update for colorizer to finish. + + class IndentAndNewlineTest(unittest.TestCase): @classmethod @@ -113,13 +120,6 @@ def tearDownClass(cls): cls.root.destroy() del cls.root - def insert(self, text): - t = self.window.text - t.delete('1.0', 'end') - t.insert('end', text) - # Force update for colorizer to finish. 
- t.update() - def test_indent_and_newline_event(self): eq = self.assertEqual w = self.window @@ -170,13 +170,13 @@ def test_indent_and_newline_event(self): w.prompt_last_line = '' for test in tests: with self.subTest(label=test.label): - self.insert(test.text) + insert(text, test.text) text.mark_set('insert', test.mark) nl(event=None) eq(get('1.0', 'end'), test.expected) # Selected text. - self.insert(' def f1(self, a, b):\n return a + b') + insert(text, ' def f1(self, a, b):\n return a + b') text.tag_add('sel', '1.17', '1.end') nl(None) # Deletes selected text before adding new line. @@ -184,11 +184,37 @@ def test_indent_and_newline_event(self): # Preserves the whitespace in shell prompt. w.prompt_last_line = '>>> ' - self.insert('>>> \t\ta =') + insert(text, '>>> \t\ta =') text.mark_set('insert', '1.5') nl(None) eq(get('1.0', 'end'), '>>> \na =\n') +class RMenuTest(unittest.TestCase): + + @classmethod + def setUpClass(cls): + requires('gui') + cls.root = Tk() + cls.root.withdraw() + cls.window = Editor(root=cls.root) + + @classmethod + def tearDownClass(cls): + cls.window._close() + del cls.window + cls.root.update_idletasks() + for id in cls.root.tk.call('after', 'info'): + cls.root.after_cancel(id) + cls.root.destroy() + del cls.root + + class DummyRMenu: + def tk_popup(x, y): pass + + def test_rclick(self): + pass + + if __name__ == '__main__': unittest.main(verbosity=2) diff --git a/Misc/NEWS.d/next/IDLE/2020-05-29-18-21-58.bpo-39885.zB_-bN.rst b/Misc/NEWS.d/next/IDLE/2020-05-29-18-21-58.bpo-39885.zB_-bN.rst new file mode 100644 index 0000000000000..a847b75997117 --- /dev/null +++ b/Misc/NEWS.d/next/IDLE/2020-05-29-18-21-58.bpo-39885.zB_-bN.rst @@ -0,0 +1,2 @@ +Make context menu Cut and Copy work again when right-clicking within a +selection. 
From webhook-mailer at python.org Fri May 29 19:11:36 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 29 May 2020 23:11:36 -0000 Subject: [Python-checkins] bpo-39885: Make IDLE context menu cut and copy work again (GH-18951) Message-ID: https://github.com/python/cpython/commit/80b6a05d38ce34ef543a2376b8132eef4ada67ce commit: 80b6a05d38ce34ef543a2376b8132eef4ada67ce branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-29T16:11:28-07:00 summary: bpo-39885: Make IDLE context menu cut and copy work again (GH-18951) Leave selection when right click within. This exception to clearing selections when right-clicking was omitted from the previous commit, 4ca060d. I did not realize that this completely disabled the context menu entries, and I should have merged a minimal fix immediately. An automated test should follow. (cherry picked from commit 97e4e0f53d6690db6b942678489716a30925b8af) Co-authored-by: Terry Jan Reedy files: A Misc/NEWS.d/next/IDLE/2020-05-29-18-21-58.bpo-39885.zB_-bN.rst M Lib/idlelib/NEWS.txt M Lib/idlelib/editor.py M Lib/idlelib/idle_test/test_editor.py diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt index 8226af4a4de6c..c751dc3bb099a 100644 --- a/Lib/idlelib/NEWS.txt +++ b/Lib/idlelib/NEWS.txt @@ -17,8 +17,9 @@ Released on 2020-03-10 bpo-27115: For 'Go to Line', use a Query entry box subclass with IDLE standard behavior and improved error checking. -bpo-39885: Since clicking to get an IDLE context menu moves the -cursor, any text selection should be and now is cleared. +bpo-39885: When a context menu is invoked by right-clicking outside +of a selection, clear the selection and move the cursor. Cut and +Copy require that the click be within the selection. bpo-39852: Edit "Go to line" now clears any selection, preventing accidental deletion. It also updates Ln and Col on the status bar. 
diff --git a/Lib/idlelib/editor.py b/Lib/idlelib/editor.py index b0f88b5463d1b..a178eaf93c013 100644 --- a/Lib/idlelib/editor.py +++ b/Lib/idlelib/editor.py @@ -499,15 +499,23 @@ def handle_yview(self, event, *args): rmenu = None def right_menu_event(self, event): - self.text.tag_remove("sel", "1.0", "end") - self.text.mark_set("insert", "@%d,%d" % (event.x, event.y)) + text = self.text + newdex = text.index(f'@{event.x},{event.y}') + try: + in_selection = (text.compare('sel.first', '<=', newdex) and + text.compare(newdex, '<=', 'sel.last')) + except TclError: + in_selection = False + if not in_selection: + text.tag_remove("sel", "1.0", "end") + text.mark_set("insert", newdex) if not self.rmenu: self.make_rmenu() rmenu = self.rmenu self.event = event iswin = sys.platform[:3] == 'win' if iswin: - self.text.config(cursor="arrow") + text.config(cursor="arrow") for item in self.rmenu_specs: try: @@ -520,7 +528,6 @@ def right_menu_event(self, event): state = getattr(self, verify_state)() rmenu.entryconfigure(label, state=state) - rmenu.tk_popup(event.x_root, event.y_root) if iswin: self.text.config(cursor="ibeam") diff --git a/Lib/idlelib/idle_test/test_editor.py b/Lib/idlelib/idle_test/test_editor.py index 91e8ef89d1d72..443dcf021679f 100644 --- a/Lib/idlelib/idle_test/test_editor.py +++ b/Lib/idlelib/idle_test/test_editor.py @@ -5,6 +5,7 @@ from collections import namedtuple from test.support import requires from tkinter import Tk +from idlelib.idle_test.mock_idle import Func Editor = editor.EditorWindow @@ -92,6 +93,12 @@ def test_tabwidth_8(self): ) +def insert(text, string): + text.delete('1.0', 'end') + text.insert('end', string) + text.update() # Force update for colorizer to finish. + + class IndentAndNewlineTest(unittest.TestCase): @classmethod @@ -113,13 +120,6 @@ def tearDownClass(cls): cls.root.destroy() del cls.root - def insert(self, text): - t = self.window.text - t.delete('1.0', 'end') - t.insert('end', text) - # Force update for colorizer to finish. 
- t.update() - def test_indent_and_newline_event(self): eq = self.assertEqual w = self.window @@ -170,13 +170,13 @@ def test_indent_and_newline_event(self): w.prompt_last_line = '' for test in tests: with self.subTest(label=test.label): - self.insert(test.text) + insert(text, test.text) text.mark_set('insert', test.mark) nl(event=None) eq(get('1.0', 'end'), test.expected) # Selected text. - self.insert(' def f1(self, a, b):\n return a + b') + insert(text, ' def f1(self, a, b):\n return a + b') text.tag_add('sel', '1.17', '1.end') nl(None) # Deletes selected text before adding new line. @@ -184,11 +184,37 @@ def test_indent_and_newline_event(self): # Preserves the whitespace in shell prompt. w.prompt_last_line = '>>> ' - self.insert('>>> \t\ta =') + insert(text, '>>> \t\ta =') text.mark_set('insert', '1.5') nl(None) eq(get('1.0', 'end'), '>>> \na =\n') +class RMenuTest(unittest.TestCase): + + @classmethod + def setUpClass(cls): + requires('gui') + cls.root = Tk() + cls.root.withdraw() + cls.window = Editor(root=cls.root) + + @classmethod + def tearDownClass(cls): + cls.window._close() + del cls.window + cls.root.update_idletasks() + for id in cls.root.tk.call('after', 'info'): + cls.root.after_cancel(id) + cls.root.destroy() + del cls.root + + class DummyRMenu: + def tk_popup(x, y): pass + + def test_rclick(self): + pass + + if __name__ == '__main__': unittest.main(verbosity=2) diff --git a/Misc/NEWS.d/next/IDLE/2020-05-29-18-21-58.bpo-39885.zB_-bN.rst b/Misc/NEWS.d/next/IDLE/2020-05-29-18-21-58.bpo-39885.zB_-bN.rst new file mode 100644 index 0000000000000..a847b75997117 --- /dev/null +++ b/Misc/NEWS.d/next/IDLE/2020-05-29-18-21-58.bpo-39885.zB_-bN.rst @@ -0,0 +1,2 @@ +Make context menu Cut and Copy work again when right-clicking within a +selection. 
From webhook-mailer at python.org Fri May 29 19:13:04 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 29 May 2020 23:13:04 -0000 Subject: [Python-checkins] bpo-39885: Make IDLE context menu cut and copy work again (GH-18951) Message-ID: https://github.com/python/cpython/commit/3dcccd1186febe0bca5936aed750d55c68b78bcd commit: 3dcccd1186febe0bca5936aed750d55c68b78bcd branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-29T16:13:00-07:00 summary: bpo-39885: Make IDLE context menu cut and copy work again (GH-18951) Leave selection when right click within. This exception to clearing selections when right-clicking was omitted from the previous commit, 4ca060d. I did not realize that this completely disabled the context menu entries, and I should have merged a minimal fix immediately. An automated test should follow. (cherry picked from commit 97e4e0f53d6690db6b942678489716a30925b8af) Co-authored-by: Terry Jan Reedy files: A Misc/NEWS.d/next/IDLE/2020-05-29-18-21-58.bpo-39885.zB_-bN.rst M Lib/idlelib/NEWS.txt M Lib/idlelib/editor.py M Lib/idlelib/idle_test/test_editor.py diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt index ff74abf5edde3..edd00d4cdac1e 100644 --- a/Lib/idlelib/NEWS.txt +++ b/Lib/idlelib/NEWS.txt @@ -17,8 +17,9 @@ when fetching a calltip. bpo-27115: For 'Go to Line', use a Query entry box subclass with IDLE standard behavior and improved error checking. -bpo-39885: Since clicking to get an IDLE context menu moves the -cursor, any text selection should be and now is cleared. +bpo-39885: When a context menu is invoked by right-clicking outside +of a selection, clear the selection and move the cursor. Cut and +Copy require that the click be within the selection. bpo-39852: Edit "Go to line" now clears any selection, preventing accidental deletion. It also updates Ln and Col on the status bar. 
diff --git a/Lib/idlelib/editor.py b/Lib/idlelib/editor.py index b0f88b5463d1b..a178eaf93c013 100644 --- a/Lib/idlelib/editor.py +++ b/Lib/idlelib/editor.py @@ -499,15 +499,23 @@ def handle_yview(self, event, *args): rmenu = None def right_menu_event(self, event): - self.text.tag_remove("sel", "1.0", "end") - self.text.mark_set("insert", "@%d,%d" % (event.x, event.y)) + text = self.text + newdex = text.index(f'@{event.x},{event.y}') + try: + in_selection = (text.compare('sel.first', '<=', newdex) and + text.compare(newdex, '<=', 'sel.last')) + except TclError: + in_selection = False + if not in_selection: + text.tag_remove("sel", "1.0", "end") + text.mark_set("insert", newdex) if not self.rmenu: self.make_rmenu() rmenu = self.rmenu self.event = event iswin = sys.platform[:3] == 'win' if iswin: - self.text.config(cursor="arrow") + text.config(cursor="arrow") for item in self.rmenu_specs: try: @@ -520,7 +528,6 @@ def right_menu_event(self, event): state = getattr(self, verify_state)() rmenu.entryconfigure(label, state=state) - rmenu.tk_popup(event.x_root, event.y_root) if iswin: self.text.config(cursor="ibeam") diff --git a/Lib/idlelib/idle_test/test_editor.py b/Lib/idlelib/idle_test/test_editor.py index 91e8ef89d1d72..443dcf021679f 100644 --- a/Lib/idlelib/idle_test/test_editor.py +++ b/Lib/idlelib/idle_test/test_editor.py @@ -5,6 +5,7 @@ from collections import namedtuple from test.support import requires from tkinter import Tk +from idlelib.idle_test.mock_idle import Func Editor = editor.EditorWindow @@ -92,6 +93,12 @@ def test_tabwidth_8(self): ) +def insert(text, string): + text.delete('1.0', 'end') + text.insert('end', string) + text.update() # Force update for colorizer to finish. + + class IndentAndNewlineTest(unittest.TestCase): @classmethod @@ -113,13 +120,6 @@ def tearDownClass(cls): cls.root.destroy() del cls.root - def insert(self, text): - t = self.window.text - t.delete('1.0', 'end') - t.insert('end', text) - # Force update for colorizer to finish. 
- t.update() - def test_indent_and_newline_event(self): eq = self.assertEqual w = self.window @@ -170,13 +170,13 @@ def test_indent_and_newline_event(self): w.prompt_last_line = '' for test in tests: with self.subTest(label=test.label): - self.insert(test.text) + insert(text, test.text) text.mark_set('insert', test.mark) nl(event=None) eq(get('1.0', 'end'), test.expected) # Selected text. - self.insert(' def f1(self, a, b):\n return a + b') + insert(text, ' def f1(self, a, b):\n return a + b') text.tag_add('sel', '1.17', '1.end') nl(None) # Deletes selected text before adding new line. @@ -184,11 +184,37 @@ def test_indent_and_newline_event(self): # Preserves the whitespace in shell prompt. w.prompt_last_line = '>>> ' - self.insert('>>> \t\ta =') + insert(text, '>>> \t\ta =') text.mark_set('insert', '1.5') nl(None) eq(get('1.0', 'end'), '>>> \na =\n') +class RMenuTest(unittest.TestCase): + + @classmethod + def setUpClass(cls): + requires('gui') + cls.root = Tk() + cls.root.withdraw() + cls.window = Editor(root=cls.root) + + @classmethod + def tearDownClass(cls): + cls.window._close() + del cls.window + cls.root.update_idletasks() + for id in cls.root.tk.call('after', 'info'): + cls.root.after_cancel(id) + cls.root.destroy() + del cls.root + + class DummyRMenu: + def tk_popup(x, y): pass + + def test_rclick(self): + pass + + if __name__ == '__main__': unittest.main(verbosity=2) diff --git a/Misc/NEWS.d/next/IDLE/2020-05-29-18-21-58.bpo-39885.zB_-bN.rst b/Misc/NEWS.d/next/IDLE/2020-05-29-18-21-58.bpo-39885.zB_-bN.rst new file mode 100644 index 0000000000000..a847b75997117 --- /dev/null +++ b/Misc/NEWS.d/next/IDLE/2020-05-29-18-21-58.bpo-39885.zB_-bN.rst @@ -0,0 +1,2 @@ +Make context menu Cut and Copy work again when right-clicking within a +selection. 
From webhook-mailer at python.org Fri May 29 21:57:11 2020 From: webhook-mailer at python.org (Samuel Gaist) Date: Sat, 30 May 2020 01:57:11 -0000 Subject: [Python-checkins] closes bpo-29017: Update the bindings for Qt information with PySide2 (GH-20149) Message-ID: https://github.com/python/cpython/commit/4649202ea75d48e1496e99911709824ca2d3170e commit: 4649202ea75d48e1496e99911709824ca2d3170e branch: master author: Samuel Gaist committer: GitHub date: 2020-05-29T20:57:03-05:00 summary: closes bpo-29017: Update the bindings for Qt information with PySide2 (GH-20149) Reference to PySide has been removed has it is for Qt 4, which has reached end of life. files: M Doc/library/othergui.rst diff --git a/Doc/library/othergui.rst b/Doc/library/othergui.rst index 4548459f8e261..48c1f2754111a 100644 --- a/Doc/library/othergui.rst +++ b/Doc/library/othergui.rst @@ -30,10 +30,11 @@ available for Python: for generating bindings for C++ libraries as Python classes, and is specifically designed for Python. - `PySide `_ - PySide is a newer binding to the Qt toolkit, provided by Nokia. - Compared to PyQt, its licensing scheme is friendlier to non-open source - applications. + `PySide2 `_ + Also known as the Qt for Python project, PySide2 is a newer binding to the + Qt toolkit. It is provided by The Qt Company and aims to provide a + complete port of PySide to Qt 5. Compared to PyQt, its licensing scheme is + friendlier to non-open source applications. `wxPython `_ wxPython is a cross-platform GUI toolkit for Python that is built around @@ -47,7 +48,7 @@ available for Python: an XML-based resource format and more, including an ever growing library of user-contributed modules. -PyGTK, PyQt, and wxPython, all have a modern look and feel and more +PyGTK, PyQt, PySide2, and wxPython, all have a modern look and feel and more widgets than Tkinter. In addition, there are many other GUI toolkits for Python, both cross-platform, and platform-specific. 
See the `GUI Programming `_ page in the Python Wiki for a From webhook-mailer at python.org Fri May 29 22:03:15 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sat, 30 May 2020 02:03:15 -0000 Subject: [Python-checkins] closes bpo-29017: Update the bindings for Qt information with PySide2 (GH-20149) Message-ID: https://github.com/python/cpython/commit/f165647e3d2addd03d0393f4f5d31bd1cc9b74a2 commit: f165647e3d2addd03d0393f4f5d31bd1cc9b74a2 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-29T19:03:10-07:00 summary: closes bpo-29017: Update the bindings for Qt information with PySide2 (GH-20149) Reference to PySide has been removed has it is for Qt 4, which has reached end of life. (cherry picked from commit 4649202ea75d48e1496e99911709824ca2d3170e) Co-authored-by: Samuel Gaist files: M Doc/library/othergui.rst diff --git a/Doc/library/othergui.rst b/Doc/library/othergui.rst index 4548459f8e261..48c1f2754111a 100644 --- a/Doc/library/othergui.rst +++ b/Doc/library/othergui.rst @@ -30,10 +30,11 @@ available for Python: for generating bindings for C++ libraries as Python classes, and is specifically designed for Python. - `PySide `_ - PySide is a newer binding to the Qt toolkit, provided by Nokia. - Compared to PyQt, its licensing scheme is friendlier to non-open source - applications. + `PySide2 `_ + Also known as the Qt for Python project, PySide2 is a newer binding to the + Qt toolkit. It is provided by The Qt Company and aims to provide a + complete port of PySide to Qt 5. Compared to PyQt, its licensing scheme is + friendlier to non-open source applications. `wxPython `_ wxPython is a cross-platform GUI toolkit for Python that is built around @@ -47,7 +48,7 @@ available for Python: an XML-based resource format and more, including an ever growing library of user-contributed modules. 
-PyGTK, PyQt, and wxPython, all have a modern look and feel and more +PyGTK, PyQt, PySide2, and wxPython, all have a modern look and feel and more widgets than Tkinter. In addition, there are many other GUI toolkits for Python, both cross-platform, and platform-specific. See the `GUI Programming `_ page in the Python Wiki for a From webhook-mailer at python.org Fri May 29 22:04:28 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sat, 30 May 2020 02:04:28 -0000 Subject: [Python-checkins] closes bpo-29017: Update the bindings for Qt information with PySide2 (GH-20149) Message-ID: https://github.com/python/cpython/commit/ef2f9acf8fc813f66523e7702654f919d20ff0c3 commit: ef2f9acf8fc813f66523e7702654f919d20ff0c3 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-29T19:04:24-07:00 summary: closes bpo-29017: Update the bindings for Qt information with PySide2 (GH-20149) Reference to PySide has been removed has it is for Qt 4, which has reached end of life. (cherry picked from commit 4649202ea75d48e1496e99911709824ca2d3170e) Co-authored-by: Samuel Gaist files: M Doc/library/othergui.rst diff --git a/Doc/library/othergui.rst b/Doc/library/othergui.rst index 4548459f8e261..48c1f2754111a 100644 --- a/Doc/library/othergui.rst +++ b/Doc/library/othergui.rst @@ -30,10 +30,11 @@ available for Python: for generating bindings for C++ libraries as Python classes, and is specifically designed for Python. - `PySide `_ - PySide is a newer binding to the Qt toolkit, provided by Nokia. - Compared to PyQt, its licensing scheme is friendlier to non-open source - applications. + `PySide2 `_ + Also known as the Qt for Python project, PySide2 is a newer binding to the + Qt toolkit. It is provided by The Qt Company and aims to provide a + complete port of PySide to Qt 5. Compared to PyQt, its licensing scheme is + friendlier to non-open source applications. 
`wxPython `_ wxPython is a cross-platform GUI toolkit for Python that is built around @@ -47,7 +48,7 @@ available for Python: an XML-based resource format and more, including an ever growing library of user-contributed modules. -PyGTK, PyQt, and wxPython, all have a modern look and feel and more +PyGTK, PyQt, PySide2, and wxPython, all have a modern look and feel and more widgets than Tkinter. In addition, there are many other GUI toolkits for Python, both cross-platform, and platform-specific. See the `GUI Programming `_ page in the Python Wiki for a From webhook-mailer at python.org Sat May 30 03:47:39 2020 From: webhook-mailer at python.org (Florian Dahlitz) Date: Sat, 30 May 2020 07:47:39 -0000 Subject: [Python-checkins] bpo-40798: Generate a different message for already removed elements (GH-20483) Message-ID: https://github.com/python/cpython/commit/735d902b363b759df9ff00e58bbf4f7e2bde78cd commit: 735d902b363b759df9ff00e58bbf4f7e2bde78cd branch: master author: Florian Dahlitz committer: GitHub date: 2020-05-30T09:47:32+02:00 summary: bpo-40798: Generate a different message for already removed elements (GH-20483) files: M Doc/tools/extensions/pyspecific.py M Doc/tools/templates/dummy.html diff --git a/Doc/tools/extensions/pyspecific.py b/Doc/tools/extensions/pyspecific.py index bc51555fa0512..46064fa3b6b00 100644 --- a/Doc/tools/extensions/pyspecific.py +++ b/Doc/tools/extensions/pyspecific.py @@ -311,7 +311,8 @@ class DeprecatedRemoved(Directive): final_argument_whitespace = True option_spec = {} - _label = 'Deprecated since version {deprecated}, will be removed in version {removed}' + _deprecated_label = 'Deprecated since version {deprecated}, will be removed in version {removed}' + _removed_label = 'Deprecated since version {deprecated}, removed in version {removed}' def run(self): node = addnodes.versionmodified() @@ -319,7 +320,15 @@ def run(self): node['type'] = 'deprecated-removed' version = (self.arguments[0], self.arguments[1]) node['version'] = 
version - label = translators['sphinx'].gettext(self._label) + env = self.state.document.settings.env + current_version = tuple(int(e) for e in env.config.version.split('.')) + removed_version = tuple(int(e) for e in self.arguments[1].split('.')) + if current_version < removed_version: + label = self._deprecated_label + else: + label = self._removed_label + + label = translators['sphinx'].gettext(label) text = label.format(deprecated=self.arguments[0], removed=self.arguments[1]) if len(self.arguments) == 3: inodes, messages = self.state.inline_text(self.arguments[2], diff --git a/Doc/tools/templates/dummy.html b/Doc/tools/templates/dummy.html index 8d94137b01b51..68ae3ad148ec2 100644 --- a/Doc/tools/templates/dummy.html +++ b/Doc/tools/templates/dummy.html @@ -5,3 +5,4 @@ {% trans %}CPython implementation detail:{% endtrans %} {% trans %}Deprecated since version {deprecated}, will be removed in version {removed}{% endtrans %} +{% trans %}Deprecated since version {deprecated}, removed in version {removed}{% endtrans %} From webhook-mailer at python.org Sat May 30 03:52:31 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sat, 30 May 2020 07:52:31 -0000 Subject: [Python-checkins] bpo-40798: Generate a different message for already removed elements (GH-20483) Message-ID: https://github.com/python/cpython/commit/a9dbae434f26b2c419a1cd0a8233143f40fd00db commit: a9dbae434f26b2c419a1cd0a8233143f40fd00db branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-30T00:52:27-07:00 summary: bpo-40798: Generate a different message for already removed elements (GH-20483) (cherry picked from commit 735d902b363b759df9ff00e58bbf4f7e2bde78cd) Co-authored-by: Florian Dahlitz files: M Doc/tools/extensions/pyspecific.py M Doc/tools/templates/dummy.html diff --git a/Doc/tools/extensions/pyspecific.py b/Doc/tools/extensions/pyspecific.py index 466e84c26147c..3b6fa90059160 100644 --- 
a/Doc/tools/extensions/pyspecific.py +++ b/Doc/tools/extensions/pyspecific.py @@ -236,7 +236,8 @@ class DeprecatedRemoved(Directive): final_argument_whitespace = True option_spec = {} - _label = 'Deprecated since version {deprecated}, will be removed in version {removed}' + _deprecated_label = 'Deprecated since version {deprecated}, will be removed in version {removed}' + _removed_label = 'Deprecated since version {deprecated}, removed in version {removed}' def run(self): node = addnodes.versionmodified() @@ -244,7 +245,15 @@ def run(self): node['type'] = 'deprecated-removed' version = (self.arguments[0], self.arguments[1]) node['version'] = version - label = translators['sphinx'].gettext(self._label) + env = self.state.document.settings.env + current_version = tuple(int(e) for e in env.config.version.split('.')) + removed_version = tuple(int(e) for e in self.arguments[1].split('.')) + if current_version < removed_version: + label = self._deprecated_label + else: + label = self._removed_label + + label = translators['sphinx'].gettext(label) text = label.format(deprecated=self.arguments[0], removed=self.arguments[1]) if len(self.arguments) == 3: inodes, messages = self.state.inline_text(self.arguments[2], diff --git a/Doc/tools/templates/dummy.html b/Doc/tools/templates/dummy.html index 8d94137b01b51..68ae3ad148ec2 100644 --- a/Doc/tools/templates/dummy.html +++ b/Doc/tools/templates/dummy.html @@ -5,3 +5,4 @@ {% trans %}CPython implementation detail:{% endtrans %} {% trans %}Deprecated since version {deprecated}, will be removed in version {removed}{% endtrans %} +{% trans %}Deprecated since version {deprecated}, removed in version {removed}{% endtrans %} From webhook-mailer at python.org Sat May 30 03:55:03 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sat, 30 May 2020 07:55:03 -0000 Subject: [Python-checkins] bpo-40798: Generate a different message for already removed elements (GH-20483) Message-ID: 
https://github.com/python/cpython/commit/ba1c2c85b39fbcb31584c20f8a63fb87f9cb9c02 commit: ba1c2c85b39fbcb31584c20f8a63fb87f9cb9c02 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-05-30T00:54:58-07:00 summary: bpo-40798: Generate a different message for already removed elements (GH-20483) (cherry picked from commit 735d902b363b759df9ff00e58bbf4f7e2bde78cd) Co-authored-by: Florian Dahlitz files: M Doc/tools/extensions/pyspecific.py M Doc/tools/templates/dummy.html diff --git a/Doc/tools/extensions/pyspecific.py b/Doc/tools/extensions/pyspecific.py index 2d95612415942..a128c705847e9 100644 --- a/Doc/tools/extensions/pyspecific.py +++ b/Doc/tools/extensions/pyspecific.py @@ -311,7 +311,8 @@ class DeprecatedRemoved(Directive): final_argument_whitespace = True option_spec = {} - _label = 'Deprecated since version {deprecated}, will be removed in version {removed}' + _deprecated_label = 'Deprecated since version {deprecated}, will be removed in version {removed}' + _removed_label = 'Deprecated since version {deprecated}, removed in version {removed}' def run(self): node = addnodes.versionmodified() @@ -319,7 +320,15 @@ def run(self): node['type'] = 'deprecated-removed' version = (self.arguments[0], self.arguments[1]) node['version'] = version - label = translators['sphinx'].gettext(self._label) + env = self.state.document.settings.env + current_version = tuple(int(e) for e in env.config.version.split('.')) + removed_version = tuple(int(e) for e in self.arguments[1].split('.')) + if current_version < removed_version: + label = self._deprecated_label + else: + label = self._removed_label + + label = translators['sphinx'].gettext(label) text = label.format(deprecated=self.arguments[0], removed=self.arguments[1]) if len(self.arguments) == 3: inodes, messages = self.state.inline_text(self.arguments[2], diff --git a/Doc/tools/templates/dummy.html b/Doc/tools/templates/dummy.html index 
8d94137b01b51..68ae3ad148ec2 100644 --- a/Doc/tools/templates/dummy.html +++ b/Doc/tools/templates/dummy.html @@ -5,3 +5,4 @@ {% trans %}CPython implementation detail:{% endtrans %} {% trans %}Deprecated since version {deprecated}, will be removed in version {removed}{% endtrans %} +{% trans %}Deprecated since version {deprecated}, removed in version {removed}{% endtrans %} From webhook-mailer at python.org Sat May 30 04:22:07 2020 From: webhook-mailer at python.org (Zackery Spytz) Date: Sat, 30 May 2020 08:22:07 -0000 Subject: [Python-checkins] bpo-40061: Fix a possible refleak in _asynciomodule.c (GH-19748) Message-ID: https://github.com/python/cpython/commit/7b78e7f9fd77bb3280ee39fb74b86772a7d46a70 commit: 7b78e7f9fd77bb3280ee39fb74b86772a7d46a70 branch: master author: Zackery Spytz committer: GitHub date: 2020-05-30T01:22:02-07:00 summary: bpo-40061: Fix a possible refleak in _asynciomodule.c (GH-19748) tup should be decrefed in the unlikely event of a PyList_New() failure. files: M Modules/_asynciomodule.c diff --git a/Modules/_asynciomodule.c b/Modules/_asynciomodule.c index 0608c40f6c339..0454f9c6824bf 100644 --- a/Modules/_asynciomodule.c +++ b/Modules/_asynciomodule.c @@ -710,6 +710,7 @@ future_add_done_callback(FutureObj *fut, PyObject *arg, PyObject *ctx) else { fut->fut_callbacks = PyList_New(1); if (fut->fut_callbacks == NULL) { + Py_DECREF(tup); return NULL; } From webhook-mailer at python.org Sat May 30 18:15:13 2020 From: webhook-mailer at python.org (Batuhan Taskaya) Date: Sat, 30 May 2020 22:15:13 -0000 Subject: [Python-checkins] bpo-40829: Add a what's new entry about deprecation of shuffle's random parameter (GH-20541) Message-ID: https://github.com/python/cpython/commit/007bb06a2de9e64fa978f5dd9131d0100227b4cf commit: 007bb06a2de9e64fa978f5dd9131d0100227b4cf branch: master author: Batuhan Taskaya committer: GitHub date: 2020-05-30T15:15:06-07:00 summary: bpo-40829: Add a what's new entry about deprecation of shuffle's random parameter 
(GH-20541) files: M Doc/whatsnew/3.9.rst diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index 6c3cbbe641b57..7f81074b2b55f 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -761,6 +761,9 @@ Deprecated `parso`_. (Contributed by Carl Meyer in :issue:`40360`.) +* The *random* parameter of :func:`random.shuffle` has been deprecated. + (Contributed by Raymond Hettinger in :issue:`40465`) + .. _LibCST: https://libcst.readthedocs.io/ .. _parso: https://parso.readthedocs.io/ From webhook-mailer at python.org Sun May 31 03:07:19 2020 From: webhook-mailer at python.org (Kyle Stanley) Date: Sun, 31 May 2020 07:07:19 -0000 Subject: [Python-checkins] Fix asyncio.to_thread() documented return type (GH-20547) Message-ID: https://github.com/python/cpython/commit/2b201369b435a4266bda5b895e3b615dbe28ea6e commit: 2b201369b435a4266bda5b895e3b615dbe28ea6e branch: master author: Kyle Stanley committer: GitHub date: 2020-05-31T00:07:04-07:00 summary: Fix asyncio.to_thread() documented return type (GH-20547) When I wrote the documentation for `asyncio.to_thread()`, I mistakenly assumed that `return await loop.run_in_executor(...)` within an async def function would return a Future. In reality, it returns a coroutine. This likely won't affect typical usage of `asyncio.to_thread()`, but it's important for the documentation to be correct here. In general, we also tend to avoid returning futures from high-level APIs in asyncio. files: M Doc/library/asyncio-task.rst M Lib/asyncio/threads.py diff --git a/Doc/library/asyncio-task.rst b/Doc/library/asyncio-task.rst index 847363b134a7a..21824ca537f77 100644 --- a/Doc/library/asyncio-task.rst +++ b/Doc/library/asyncio-task.rst @@ -614,8 +614,7 @@ Running in Threads allowing context variables from the event loop thread to be accessed in the separate thread. - Return an :class:`asyncio.Future` which represents the eventual result of - *func*. + Return a coroutine that can be awaited to get the eventual result of *func*. 
This coroutine function is primarily intended to be used for executing IO-bound functions/methods that would otherwise block the event loop if diff --git a/Lib/asyncio/threads.py b/Lib/asyncio/threads.py index 51e0ba95d822e..34b7513a42090 100644 --- a/Lib/asyncio/threads.py +++ b/Lib/asyncio/threads.py @@ -17,7 +17,7 @@ async def to_thread(func, /, *args, **kwargs): allowing context variables from the main thread to be accessed in the separate thread. - Return an asyncio.Future which represents the eventual result of *func*. + Return a coroutine that can be awaited to get the eventual result of *func*. """ loop = events.get_running_loop() ctx = contextvars.copy_context() From webhook-mailer at python.org Sun May 31 17:57:49 2020 From: webhook-mailer at python.org (Raymond Hettinger) Date: Sun, 31 May 2020 21:57:49 -0000 Subject: [Python-checkins] bpo-40755: Add rich comparisons to Counter (GH-20548) Message-ID: https://github.com/python/cpython/commit/b7d79b4f36787874128c439d38397fe95c48429b commit: b7d79b4f36787874128c439d38397fe95c48429b branch: master author: Raymond Hettinger committer: GitHub date: 2020-05-31T14:57:42-07:00 summary: bpo-40755: Add rich comparisons to Counter (GH-20548) files: A Misc/NEWS.d/next/Library/2020-05-30-18-48-58.bpo-40755.IyOe2J.rst D Misc/NEWS.d/next/Library/2020-05-23-18-24-13.bpo-22533.k64XGo.rst M Doc/library/collections.rst M Lib/collections/__init__.py M Lib/test/test_collections.py diff --git a/Doc/library/collections.rst b/Doc/library/collections.rst index ea2b420292eb0..f538da5e1c9fa 100644 --- a/Doc/library/collections.rst +++ b/Doc/library/collections.rst @@ -290,47 +290,6 @@ For example:: >>> sorted(c.elements()) ['a', 'a', 'a', 'a', 'b', 'b'] - .. method:: isdisjoint(other) - - True if none of the elements in *self* overlap with those in *other*. - Negative or missing counts are ignored. - Logically equivalent to: ``not (+self) & (+other)`` - - .. versionadded:: 3.10 - - .. 
method:: isequal(other) - - Test whether counts agree exactly. - Negative or missing counts are treated as zero. - - This method works differently than the inherited :meth:`__eq__` method - which treats negative or missing counts as distinct from zero:: - - >>> Counter(a=1, b=0).isequal(Counter(a=1)) - True - >>> Counter(a=1, b=0) == Counter(a=1) - False - - Logically equivalent to: ``+self == +other`` - - .. versionadded:: 3.10 - - .. method:: issubset(other) - - True if the counts in *self* are less than or equal to those in *other*. - Negative or missing counts are treated as zero. - Logically equivalent to: ``not self - (+other)`` - - .. versionadded:: 3.10 - - .. method:: issuperset(other) - - True if the counts in *self* are greater than or equal to those in *other*. - Negative or missing counts are treated as zero. - Logically equivalent to: ``not other - (+self)`` - - .. versionadded:: 3.10 - .. method:: most_common([n]) Return a list of the *n* most common elements and their counts from the @@ -369,6 +328,19 @@ For example:: instead of replacing them. Also, the *iterable* is expected to be a sequence of elements, not a sequence of ``(key, value)`` pairs. +Counters support rich comparison operators for equality, subset, and +superset relationships: ``==``, ``!=``, ``<``, ``<=``, ``>``, ``>=``. +All of those tests treat missing elements as having zero counts so that +``Counter(a=1) == Counter(a=1, b=0)`` returns true. + +.. versionadded:: 3.10 + Rich comparison operations we were added + +.. versionchanged:: 3.10 + In equality tests, missing elements are treated as having zero counts. + Formerly, ``Counter(a=3)`` and ``Counter(a=3, b=0)`` were considered + distinct. 
+ Common patterns for working with :class:`Counter` objects:: sum(c.values()) # total of all counts diff --git a/Lib/collections/__init__.py b/Lib/collections/__init__.py index 55fb46c9bc157..2acf67289f225 100644 --- a/Lib/collections/__init__.py +++ b/Lib/collections/__init__.py @@ -691,6 +691,42 @@ def __delitem__(self, elem): if elem in self: super().__delitem__(elem) + def __eq__(self, other): + 'True if all counts agree. Missing counts are treated as zero.' + if not isinstance(other, Counter): + return NotImplemented + return all(self[e] == other[e] for c in (self, other) for e in c) + + def __ne__(self, other): + 'True if any counts disagree. Missing counts are treated as zero.' + if not isinstance(other, Counter): + return NotImplemented + return not self == other + + def __le__(self, other): + 'True if all counts in self are a subset of those in other.' + if not isinstance(other, Counter): + return NotImplemented + return all(self[e] <= other[e] for c in (self, other) for e in c) + + def __lt__(self, other): + 'True if all counts in self are a proper subset of those in other.' + if not isinstance(other, Counter): + return NotImplemented + return self <= other and self != other + + def __ge__(self, other): + 'True if all counts in self are a superset of those in other.' + if not isinstance(other, Counter): + return NotImplemented + return all(self[e] >= other[e] for c in (self, other) for e in c) + + def __gt__(self, other): + 'True if all counts in self are a proper superset of those in other.' + if not isinstance(other, Counter): + return NotImplemented + return self >= other and self != other + def __repr__(self): if not self: return '%s()' % self.__class__.__name__ @@ -886,92 +922,6 @@ def __iand__(self, other): self[elem] = other_count return self._keep_positive() - def isequal(self, other): - ''' Test whether counts agree exactly. - - Negative or missing counts are treated as zero. 
- - This is different than the inherited __eq__() method which - treats negative or missing counts as distinct from zero: - - >>> Counter(a=1, b=0).isequal(Counter(a=1)) - True - >>> Counter(a=1, b=0) == Counter(a=1) - False - - Logically equivalent to: +self == +other - ''' - if not isinstance(other, Counter): - other = Counter(other) - for elem in set(self) | set(other): - left = self[elem] - right = other[elem] - if left == right: - continue - if left < 0: - left = 0 - if right < 0: - right = 0 - if left != right: - return False - return True - - def issubset(self, other): - '''True if the counts in self are less than or equal to those in other. - - Negative or missing counts are treated as zero. - - Logically equivalent to: not self - (+other) - ''' - if not isinstance(other, Counter): - other = Counter(other) - for elem, count in self.items(): - other_count = other[elem] - if other_count < 0: - other_count = 0 - if count > other_count: - return False - return True - - def issuperset(self, other): - '''True if the counts in self are greater than or equal to those in other. - - Negative or missing counts are treated as zero. - - Logically equivalent to: not other - (+self) - ''' - if not isinstance(other, Counter): - other = Counter(other) - return other.issubset(self) - - def isdisjoint(self, other): - '''True if none of the elements in self overlap with those in other. - - Negative or missing counts are ignored. - - Logically equivalent to: not (+self) & (+other) - ''' - if not isinstance(other, Counter): - other = Counter(other) - for elem, count in self.items(): - if count > 0 and other[elem] > 0: - return False - return True - - # Rich comparison operators for multiset subset and superset tests - # have been deliberately omitted due to semantic conflicts with the - # existing inherited dict equality method. Subset and superset - # semantics ignore zero counts and require that p?q ? p?q ? 
p=q; - # however, that would not be the case for p=Counter(a=1, b=0) - # and q=Counter(a=1) where the dictionaries are not equal. - - def _omitted(self, other): - raise TypeError( - 'Rich comparison operators have been deliberately omitted. ' - 'Use the isequal(), issubset(), and issuperset() methods instead.') - - __lt__ = __le__ = __gt__ = __ge__ = __lt__ = _omitted - ######################################################################## ### ChainMap diff --git a/Lib/test/test_collections.py b/Lib/test/test_collections.py index 8d80e88673b89..7c7f8655b0fbd 100644 --- a/Lib/test/test_collections.py +++ b/Lib/test/test_collections.py @@ -2123,29 +2123,6 @@ def test_multiset_operations(self): set_result = setop(set(p.elements()), set(q.elements())) self.assertEqual(counter_result, dict.fromkeys(set_result, 1)) - def test_subset_superset_not_implemented(self): - # Verify that multiset comparison operations are not implemented. - - # These operations were intentionally omitted because multiset - # comparison semantics conflict with existing dict equality semantics. - - # For multisets, we would expect that if p<=q and p>=q are both true, - # then p==q. However, dict equality semantics require that p!=q when - # one of sets contains an element with a zero count and the other - # doesn't. 
- - p = Counter(a=1, b=0) - q = Counter(a=1, c=0) - self.assertNotEqual(p, q) - with self.assertRaises(TypeError): - p < q - with self.assertRaises(TypeError): - p <= q - with self.assertRaises(TypeError): - p > q - with self.assertRaises(TypeError): - p >= q - def test_inplace_operations(self): elements = 'abcd' for i in range(1000): @@ -2234,49 +2211,32 @@ def test_multiset_operations_equivalent_to_set_operations(self): self.assertEqual(set(cp - cq), sp - sq) self.assertEqual(set(cp | cq), sp | sq) self.assertEqual(set(cp & cq), sp & sq) - self.assertEqual(cp.isequal(cq), sp == sq) - self.assertEqual(cp.issubset(cq), sp.issubset(sq)) - self.assertEqual(cp.issuperset(cq), sp.issuperset(sq)) - self.assertEqual(cp.isdisjoint(cq), sp.isdisjoint(sq)) - - def test_multiset_equal(self): - self.assertTrue(Counter(a=3, b=2, c=0).isequal('ababa')) - self.assertFalse(Counter(a=3, b=2).isequal('babab')) - - def test_multiset_subset(self): - self.assertTrue(Counter(a=3, b=2, c=0).issubset('ababa')) - self.assertFalse(Counter(a=3, b=2).issubset('babab')) - - def test_multiset_superset(self): - self.assertTrue(Counter(a=3, b=2, c=0).issuperset('aab')) - self.assertFalse(Counter(a=3, b=2, c=0).issuperset('aabd')) - - def test_multiset_disjoint(self): - self.assertTrue(Counter(a=3, b=2, c=0).isdisjoint('cde')) - self.assertFalse(Counter(a=3, b=2, c=0).isdisjoint('bcd')) - - def test_multiset_predicates_with_negative_counts(self): - # Multiset predicates run on the output of the elements() method, - # meaning that zero counts and negative counts are ignored. - # The tests below confirm that we get that same results as the - # tests above, even after a negative count has been included - # in either *self* or *other*. 
- self.assertTrue(Counter(a=3, b=2, c=0, d=-1).isequal('ababa')) - self.assertFalse(Counter(a=3, b=2, d=-1).isequal('babab')) - self.assertTrue(Counter(a=3, b=2, c=0, d=-1).issubset('ababa')) - self.assertFalse(Counter(a=3, b=2, d=-1).issubset('babab')) - self.assertTrue(Counter(a=3, b=2, c=0, d=-1).issuperset('aab')) - self.assertFalse(Counter(a=3, b=2, c=0, d=-1).issuperset('aabd')) - self.assertTrue(Counter(a=3, b=2, c=0, d=-1).isdisjoint('cde')) - self.assertFalse(Counter(a=3, b=2, c=0, d=-1).isdisjoint('bcd')) - self.assertTrue(Counter(a=3, b=2, c=0, d=-1).isequal(Counter(a=3, b=2, c=-1))) - self.assertFalse(Counter(a=3, b=2, d=-1).isequal(Counter(a=2, b=3, c=-1))) - self.assertTrue(Counter(a=3, b=2, c=0, d=-1).issubset(Counter(a=3, b=2, c=-1))) - self.assertFalse(Counter(a=3, b=2, d=-1).issubset(Counter(a=2, b=3, c=-1))) - self.assertTrue(Counter(a=3, b=2, c=0, d=-1).issuperset(Counter(a=2, b=1, c=-1))) - self.assertFalse(Counter(a=3, b=2, c=0, d=-1).issuperset(Counter(a=2, b=1, c=-1, d=1))) - self.assertTrue(Counter(a=3, b=2, c=0, d=-1).isdisjoint(Counter(c=1, d=2, e=3, f=-1))) - self.assertFalse(Counter(a=3, b=2, c=0, d=-1).isdisjoint(Counter(b=1, c=1, d=1, e=-1))) + self.assertEqual(cp == cq, sp == sq) + self.assertEqual(cp != cq, sp != sq) + self.assertEqual(cp <= cq, sp <= sq) + self.assertEqual(cp >= cq, sp >= sq) + self.assertEqual(cp < cq, sp < sq) + self.assertEqual(cp > cq, sp > sq) + + def test_eq(self): + self.assertEqual(Counter(a=3, b=2, c=0), Counter('ababa')) + self.assertNotEqual(Counter(a=3, b=2), Counter('babab')) + + def test_le(self): + self.assertTrue(Counter(a=3, b=2, c=0) <= Counter('ababa')) + self.assertFalse(Counter(a=3, b=2) <= Counter('babab')) + + def test_lt(self): + self.assertTrue(Counter(a=3, b=1, c=0) < Counter('ababa')) + self.assertFalse(Counter(a=3, b=2, c=0) < Counter('ababa')) + + def test_ge(self): + self.assertTrue(Counter(a=2, b=1, c=0) >= Counter('aab')) + self.assertFalse(Counter(a=3, b=2, c=0) >= Counter('aabd')) 
+ + def test_gt(self): + self.assertTrue(Counter(a=3, b=2, c=0) > Counter('aab')) + self.assertFalse(Counter(a=2, b=1, c=0) > Counter('aab')) ################################################################################ diff --git a/Misc/NEWS.d/next/Library/2020-05-23-18-24-13.bpo-22533.k64XGo.rst b/Misc/NEWS.d/next/Library/2020-05-23-18-24-13.bpo-22533.k64XGo.rst deleted file mode 100644 index 737162f7e12b2..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-23-18-24-13.bpo-22533.k64XGo.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add multiset comparison methods to collections.Counter(): isequal(), -issubset(), issuperset(), and isdisjoint(). diff --git a/Misc/NEWS.d/next/Library/2020-05-30-18-48-58.bpo-40755.IyOe2J.rst b/Misc/NEWS.d/next/Library/2020-05-30-18-48-58.bpo-40755.IyOe2J.rst new file mode 100644 index 0000000000000..be5653ea58f27 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-30-18-48-58.bpo-40755.IyOe2J.rst @@ -0,0 +1 @@ +Add rich comparisons to collections.Counter(). From webhook-mailer at python.org Sun May 31 18:01:58 2020 From: webhook-mailer at python.org (Batuhan Taskaya) Date: Sun, 31 May 2020 22:01:58 -0000 Subject: [Python-checkins] bpo-40759: Deprecate the symbol module (GH-20364) Message-ID: https://github.com/python/cpython/commit/cf88871d6a9c12e7b7e5f4d65abc2ec6e2fe952e commit: cf88871d6a9c12e7b7e5f4d65abc2ec6e2fe952e branch: master author: Batuhan Taskaya committer: GitHub date: 2020-05-31T15:01:50-07:00 summary: bpo-40759: Deprecate the symbol module (GH-20364) Automerge-Triggered-By: @pablogsal files: A Misc/NEWS.d/next/Library/2020-05-24-23-52-35.bpo-40759.DdZdaw.rst M Doc/library/symbol.rst M Doc/whatsnew/3.9.rst M Lib/symbol.py diff --git a/Doc/library/symbol.rst b/Doc/library/symbol.rst index 44996936e2d28..d56600af29d6e 100644 --- a/Doc/library/symbol.rst +++ b/Doc/library/symbol.rst @@ -17,6 +17,11 @@ the definitions of the names in the context of the language grammar. 
The specific numeric values which the names map to may change between Python versions. +.. warning:: + + The symbol module is deprecated and will be removed in future versions of + Python. + This module also provides one additional data object: diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index 7f81074b2b55f..6ace7a4253f18 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -692,9 +692,10 @@ Deprecated Python versions it will raise a :exc:`TypeError` for all floats. (Contributed by Serhiy Storchaka in :issue:`37315`.) -* The :mod:`parser` module is deprecated and will be removed in future versions - of Python. For the majority of use cases, users can leverage the Abstract Syntax - Tree (AST) generation and compilation stage, using the :mod:`ast` module. +* The :mod:`parser` and :mod:`symbol` modules are deprecated and will be + removed in future versions of Python. For the majority of use cases, + users can leverage the Abstract Syntax Tree (AST) generation and compilation + stage, using the :mod:`ast` module. * Using :data:`NotImplemented` in a boolean context has been deprecated, as it is almost exclusively the result of incorrect rich comparator diff --git a/Lib/symbol.py b/Lib/symbol.py index 36e0eec7ac1f5..aaac8c914431b 100644 --- a/Lib/symbol.py +++ b/Lib/symbol.py @@ -11,6 +11,15 @@ # # make regen-symbol +import warnings + +warnings.warn( + "The symbol module is deprecated and will be removed " + "in future versions of Python", + DeprecationWarning, + stacklevel=2, +) + #--start constants-- single_input = 256 file_input = 257 diff --git a/Misc/NEWS.d/next/Library/2020-05-24-23-52-35.bpo-40759.DdZdaw.rst b/Misc/NEWS.d/next/Library/2020-05-24-23-52-35.bpo-40759.DdZdaw.rst new file mode 100644 index 0000000000000..e77da3ac3dfa9 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-24-23-52-35.bpo-40759.DdZdaw.rst @@ -0,0 +1 @@ +Deprecate the :mod:`symbol` module. 
From webhook-mailer at python.org Sun May 31 19:28:53 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Sun, 31 May 2020 23:28:53 -0000 Subject: [Python-checkins] Fix typo in "What's new in Python 3.9" (GH-20559) Message-ID: https://github.com/python/cpython/commit/491a3d3a75b656c8317d8ce343aea767978b946c commit: 491a3d3a75b656c8317d8ce343aea767978b946c branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-05-31T16:28:46-07:00 summary: Fix typo in "What's new in Python 3.9" (GH-20559) Automerge-Triggered-By: @pablogsal files: M Doc/whatsnew/3.9.rst diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index 6ace7a4253f18..3d5cec6026add 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -156,7 +156,7 @@ back to the LL(1) parser using a command line switch (``-X oldparser``) or an environment variable (``PYTHONOLDPARSER=1``). See :pep:`617` for more details. (Contributed by Guido van Rossum, -Pablo Galindo and Lysandros Nikolau in :issue:`40334`.) +Pablo Galindo and Lysandros Nikolaou in :issue:`40334`.) Other Language Changes From webhook-mailer at python.org Sun May 31 19:41:18 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Sun, 31 May 2020 23:41:18 -0000 Subject: [Python-checkins] bpo-17005: Move topological sort functionality to its own module (GH-20558) Message-ID: https://github.com/python/cpython/commit/2f172d8f1525defe9bba4d49e967fdfc69151731 commit: 2f172d8f1525defe9bba4d49e967fdfc69151731 branch: master author: Pablo Galindo committer: GitHub date: 2020-06-01T00:41:14+01:00 summary: bpo-17005: Move topological sort functionality to its own module (GH-20558) The topological sort functionality that was introduced initially in the functools module has been moved to a new graphlib module to better accommodate the new tools and keep the original scope of the functools module. 
files: A Doc/library/graphlib.rst A Lib/graphlib.py A Lib/test/test_graphlib.py A Misc/NEWS.d/next/Library/2020-05-31-23-32-36.bpo-17005.JlRUGB.rst M Doc/library/datatypes.rst M Doc/library/functools.rst M Doc/whatsnew/3.9.rst M Lib/functools.py M Lib/test/test_functools.py M PCbuild/lib.pyproj diff --git a/Doc/library/datatypes.rst b/Doc/library/datatypes.rst index 675bbb6fafdca..ff51b2779e5fa 100644 --- a/Doc/library/datatypes.rst +++ b/Doc/library/datatypes.rst @@ -33,3 +33,4 @@ The following modules are documented in this chapter: pprint.rst reprlib.rst enum.rst + graphlib.rst diff --git a/Doc/library/functools.rst b/Doc/library/functools.rst index a44eb85b27dba..14aa184e2cd14 100644 --- a/Doc/library/functools.rst +++ b/Doc/library/functools.rst @@ -543,184 +543,6 @@ The :mod:`functools` module defines the following functions: .. versionadded:: 3.8 -.. class:: TopologicalSorter(graph=None) - - Provides functionality to topologically sort a graph of hashable nodes. - - A topological order is a linear ordering of the vertices in a graph such that - for every directed edge u -> v from vertex u to vertex v, vertex u comes - before vertex v in the ordering. For instance, the vertices of the graph may - represent tasks to be performed, and the edges may represent constraints that - one task must be performed before another; in this example, a topological - ordering is just a valid sequence for the tasks. A complete topological - ordering is possible if and only if the graph has no directed cycles, that - is, if it is a directed acyclic graph. - - If the optional *graph* argument is provided it must be a dictionary - representing a directed acyclic graph where the keys are nodes and the values - are iterables of all predecessors of that node in the graph (the nodes that - have edges that point to the value in the key). Additional nodes can be added - to the graph using the :meth:`~TopologicalSorter.add` method. 
- - In the general case, the steps required to perform the sorting of a given - graph are as follows: - - * Create an instance of the :class:`TopologicalSorter` with an optional - initial graph. - * Add additional nodes to the graph. - * Call :meth:`~TopologicalSorter.prepare` on the graph. - * While :meth:`~TopologicalSorter.is_active` is ``True``, iterate over - the nodes returned by :meth:`~TopologicalSorter.get_ready` and - process them. Call :meth:`~TopologicalSorter.done` on each node as it - finishes processing. - - In case just an immediate sorting of the nodes in the graph is required and - no parallelism is involved, the convenience method - :meth:`TopologicalSorter.static_order` can be used directly: - - .. doctest:: - - >>> graph = {"D": {"B", "C"}, "C": {"A"}, "B": {"A"}} - >>> ts = TopologicalSorter(graph) - >>> tuple(ts.static_order()) - ('A', 'C', 'B', 'D') - - The class is designed to easily support parallel processing of the nodes as - they become ready. For instance:: - - topological_sorter = TopologicalSorter() - - # Add nodes to 'topological_sorter'... - - topological_sorter.prepare() - while topological_sorter.is_active(): - for node in topological_sorter.get_ready(): - # Worker threads or processes take nodes to work on off the - # 'task_queue' queue. - task_queue.put(node) - - # When the work for a node is done, workers put the node in - # 'finalized_tasks_queue' so we can get more nodes to work on. - # The definition of 'is_active()' guarantees that, at this point, at - # least one node has been placed on 'task_queue' that hasn't yet - # been passed to 'done()', so this blocking 'get()' must (eventually) - # succeed. After calling 'done()', we loop back to call 'get_ready()' - # again, so put newly freed nodes on 'task_queue' as soon as - # logically possible. - node = finalized_tasks_queue.get() - topological_sorter.done(node) - - .. method:: add(node, *predecessors) - - Add a new node and its predecessors to the graph. 
Both the *node* and all - elements in *predecessors* must be hashable. - - If called multiple times with the same node argument, the set of - dependencies will be the union of all dependencies passed in. - - It is possible to add a node with no dependencies (*predecessors* is not - provided) or to provide a dependency twice. If a node that has not been - provided before is included among *predecessors* it will be automatically - added to the graph with no predecessors of its own. - - Raises :exc:`ValueError` if called after :meth:`~TopologicalSorter.prepare`. - - .. method:: prepare() - - Mark the graph as finished and check for cycles in the graph. If any cycle - is detected, :exc:`CycleError` will be raised, but - :meth:`~TopologicalSorter.get_ready` can still be used to obtain as many - nodes as possible until cycles block more progress. After a call to this - function, the graph cannot be modified, and therefore no more nodes can be - added using :meth:`~TopologicalSorter.add`. - - .. method:: is_active() - - Returns ``True`` if more progress can be made and ``False`` otherwise. - Progress can be made if cycles do not block the resolution and either - there are still nodes ready that haven't yet been returned by - :meth:`TopologicalSorter.get_ready` or the number of nodes marked - :meth:`TopologicalSorter.done` is less than the number that have been - returned by :meth:`TopologicalSorter.get_ready`. - - The :meth:`~TopologicalSorter.__bool__` method of this class defers to - this function, so instead of:: - - if ts.is_active(): - ... - - if possible to simply do:: - - if ts: - ... - - Raises :exc:`ValueError` if called without calling - :meth:`~TopologicalSorter.prepare` previously. - - .. method:: done(*nodes) - - Marks a set of nodes returned by :meth:`TopologicalSorter.get_ready` as - processed, unblocking any successor of each node in *nodes* for being - returned in the future by a call to :meth:`TopologicalSorter.get_ready`. 
- - Raises :exc:`ValueError` if any node in *nodes* has already been marked as - processed by a previous call to this method or if a node was not added to - the graph by using :meth:`TopologicalSorter.add`, if called without - calling :meth:`~TopologicalSorter.prepare` or if node has not yet been - returned by :meth:`~TopologicalSorter.get_ready`. - - .. method:: get_ready() - - Returns a ``tuple`` with all the nodes that are ready. Initially it - returns all nodes with no predecessors, and once those are marked as - processed by calling :meth:`TopologicalSorter.done`, further calls will - return all new nodes that have all their predecessors already processed. - Once no more progress can be made, empty tuples are returned. - - Raises :exc:`ValueError` if called without calling - :meth:`~TopologicalSorter.prepare` previously. - - .. method:: static_order() - - Returns an iterable of nodes in a topological order. Using this method - does not require to call :meth:`TopologicalSorter.prepare` or - :meth:`TopologicalSorter.done`. This method is equivalent to:: - - def static_order(self): - self.prepare() - while self.is_active(): - node_group = self.get_ready() - yield from node_group - self.done(*node_group) - - The particular order that is returned may depend on the specific order in - which the items were inserted in the graph. For example: - - .. doctest:: - - >>> ts = TopologicalSorter() - >>> ts.add(3, 2, 1) - >>> ts.add(1, 0) - >>> print([*ts.static_order()]) - [2, 0, 1, 3] - - >>> ts2 = TopologicalSorter() - >>> ts2.add(1, 0) - >>> ts2.add(3, 2, 1) - >>> print([*ts2.static_order()]) - [0, 2, 1, 3] - - This is due to the fact that "0" and "2" are in the same level in the - graph (they would have been returned in the same call to - :meth:`~TopologicalSorter.get_ready`) and the order between them is - determined by the order of insertion. - - - If any cycle is detected, :exc:`CycleError` will be raised. - - .. versionadded:: 3.9 - - .. 
function:: update_wrapper(wrapper, wrapped, assigned=WRAPPER_ASSIGNMENTS, updated=WRAPPER_UPDATES) Update a *wrapper* function to look like the *wrapped* function. The optional @@ -829,20 +651,4 @@ callable, weak referencable, and can have attributes. There are some important differences. For instance, the :attr:`~definition.__name__` and :attr:`__doc__` attributes are not created automatically. Also, :class:`partial` objects defined in classes behave like static methods and do not transform into bound methods -during instance attribute look-up. - - -Exceptions ----------- -The :mod:`functools` module defines the following exception classes: - -.. exception:: CycleError - - Subclass of :exc:`ValueError` raised by :meth:`TopologicalSorter.prepare` if cycles exist - in the working graph. If multiple cycles exist, only one undefined choice among them will - be reported and included in the exception. - - The detected cycle can be accessed via the second element in the :attr:`~CycleError.args` - attribute of the exception instance and consists in a list of nodes, such that each node is, - in the graph, an immediate predecessor of the next node in the list. In the reported list, - the first and the last node will be the same, to make it clear that it is cyclic. +during instance attribute look-up. \ No newline at end of file diff --git a/Doc/library/graphlib.rst b/Doc/library/graphlib.rst new file mode 100644 index 0000000000000..820615e723015 --- /dev/null +++ b/Doc/library/graphlib.rst @@ -0,0 +1,209 @@ +:mod:`graphlib` --- Functionality to operate with graph-like structures +========================================================================= + +.. module:: graphlib + :synopsis: Functionality to operate with graph-like structures + + +**Source code:** :source:`Lib/graphlib.py` + +.. testsetup:: default + + import graphlib + from graphlib import * + +-------------- + + +.. 
class:: TopologicalSorter(graph=None) + + Provides functionality to topologically sort a graph of hashable nodes. + + A topological order is a linear ordering of the vertices in a graph such that + for every directed edge u -> v from vertex u to vertex v, vertex u comes + before vertex v in the ordering. For instance, the vertices of the graph may + represent tasks to be performed, and the edges may represent constraints that + one task must be performed before another; in this example, a topological + ordering is just a valid sequence for the tasks. A complete topological + ordering is possible if and only if the graph has no directed cycles, that + is, if it is a directed acyclic graph. + + If the optional *graph* argument is provided it must be a dictionary + representing a directed acyclic graph where the keys are nodes and the values + are iterables of all predecessors of that node in the graph (the nodes that + have edges that point to the value in the key). Additional nodes can be added + to the graph using the :meth:`~TopologicalSorter.add` method. + + In the general case, the steps required to perform the sorting of a given + graph are as follows: + + * Create an instance of the :class:`TopologicalSorter` with an optional + initial graph. + * Add additional nodes to the graph. + * Call :meth:`~TopologicalSorter.prepare` on the graph. + * While :meth:`~TopologicalSorter.is_active` is ``True``, iterate over + the nodes returned by :meth:`~TopologicalSorter.get_ready` and + process them. Call :meth:`~TopologicalSorter.done` on each node as it + finishes processing. + + In case just an immediate sorting of the nodes in the graph is required and + no parallelism is involved, the convenience method + :meth:`TopologicalSorter.static_order` can be used directly: + + .. 
doctest:: + + >>> graph = {"D": {"B", "C"}, "C": {"A"}, "B": {"A"}} + >>> ts = TopologicalSorter(graph) + >>> tuple(ts.static_order()) + ('A', 'C', 'B', 'D') + + The class is designed to easily support parallel processing of the nodes as + they become ready. For instance:: + + topological_sorter = TopologicalSorter() + + # Add nodes to 'topological_sorter'... + + topological_sorter.prepare() + while topological_sorter.is_active(): + for node in topological_sorter.get_ready(): + # Worker threads or processes take nodes to work on off the + # 'task_queue' queue. + task_queue.put(node) + + # When the work for a node is done, workers put the node in + # 'finalized_tasks_queue' so we can get more nodes to work on. + # The definition of 'is_active()' guarantees that, at this point, at + # least one node has been placed on 'task_queue' that hasn't yet + # been passed to 'done()', so this blocking 'get()' must (eventually) + # succeed. After calling 'done()', we loop back to call 'get_ready()' + # again, so put newly freed nodes on 'task_queue' as soon as + # logically possible. + node = finalized_tasks_queue.get() + topological_sorter.done(node) + + .. method:: add(node, *predecessors) + + Add a new node and its predecessors to the graph. Both the *node* and all + elements in *predecessors* must be hashable. + + If called multiple times with the same node argument, the set of + dependencies will be the union of all dependencies passed in. + + It is possible to add a node with no dependencies (*predecessors* is not + provided) or to provide a dependency twice. If a node that has not been + provided before is included among *predecessors* it will be automatically + added to the graph with no predecessors of its own. + + Raises :exc:`ValueError` if called after :meth:`~TopologicalSorter.prepare`. + + .. method:: prepare() + + Mark the graph as finished and check for cycles in the graph. 
If any cycle + is detected, :exc:`CycleError` will be raised, but + :meth:`~TopologicalSorter.get_ready` can still be used to obtain as many + nodes as possible until cycles block more progress. After a call to this + function, the graph cannot be modified, and therefore no more nodes can be + added using :meth:`~TopologicalSorter.add`. + + .. method:: is_active() + + Returns ``True`` if more progress can be made and ``False`` otherwise. + Progress can be made if cycles do not block the resolution and either + there are still nodes ready that haven't yet been returned by + :meth:`TopologicalSorter.get_ready` or the number of nodes marked + :meth:`TopologicalSorter.done` is less than the number that have been + returned by :meth:`TopologicalSorter.get_ready`. + + The :meth:`~TopologicalSorter.__bool__` method of this class defers to + this function, so instead of:: + + if ts.is_active(): + ... + + if possible to simply do:: + + if ts: + ... + + Raises :exc:`ValueError` if called without calling + :meth:`~TopologicalSorter.prepare` previously. + + .. method:: done(*nodes) + + Marks a set of nodes returned by :meth:`TopologicalSorter.get_ready` as + processed, unblocking any successor of each node in *nodes* for being + returned in the future by a call to :meth:`TopologicalSorter.get_ready`. + + Raises :exc:`ValueError` if any node in *nodes* has already been marked as + processed by a previous call to this method or if a node was not added to + the graph by using :meth:`TopologicalSorter.add`, if called without + calling :meth:`~TopologicalSorter.prepare` or if node has not yet been + returned by :meth:`~TopologicalSorter.get_ready`. + + .. method:: get_ready() + + Returns a ``tuple`` with all the nodes that are ready. Initially it + returns all nodes with no predecessors, and once those are marked as + processed by calling :meth:`TopologicalSorter.done`, further calls will + return all new nodes that have all their predecessors already processed. 
+ Once no more progress can be made, empty tuples are returned. + + Raises :exc:`ValueError` if called without calling + :meth:`~TopologicalSorter.prepare` previously. + + .. method:: static_order() + + Returns an iterable of nodes in a topological order. Using this method + does not require to call :meth:`TopologicalSorter.prepare` or + :meth:`TopologicalSorter.done`. This method is equivalent to:: + + def static_order(self): + self.prepare() + while self.is_active(): + node_group = self.get_ready() + yield from node_group + self.done(*node_group) + + The particular order that is returned may depend on the specific order in + which the items were inserted in the graph. For example: + + .. doctest:: + + >>> ts = TopologicalSorter() + >>> ts.add(3, 2, 1) + >>> ts.add(1, 0) + >>> print([*ts.static_order()]) + [2, 0, 1, 3] + + >>> ts2 = TopologicalSorter() + >>> ts2.add(1, 0) + >>> ts2.add(3, 2, 1) + >>> print([*ts2.static_order()]) + [0, 2, 1, 3] + + This is due to the fact that "0" and "2" are in the same level in the + graph (they would have been returned in the same call to + :meth:`~TopologicalSorter.get_ready`) and the order between them is + determined by the order of insertion. + + + If any cycle is detected, :exc:`CycleError` will be raised. + + .. versionadded:: 3.9 + + +Exceptions +---------- +The :mod:`graphlib` module defines the following exception classes: + +.. exception:: CycleError + + Subclass of :exc:`ValueError` raised by :meth:`TopologicalSorter.prepare` if cycles exist + in the working graph. If multiple cycles exist, only one undefined choice among them will + be reported and included in the exception. + + The detected cycle can be accessed via the second element in the :attr:`~CycleError.args` + attribute of the exception instance and consists in a list of nodes, such that each node is, + in the graph, an immediate predecessor of the next node in the list. 
In the reported list, + the first and the last node will be the same, to make it clear that it is cyclic. \ No newline at end of file diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index 3d5cec6026add..a468130af1083 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -245,6 +245,14 @@ PyPI and maintained by the CPython core team. PEP written and implemented by Paul Ganssle +graphlib +--------- + +Add the :mod:`graphlib` that contains the :class:`graphlib.TopologicalSorter` class +to offer functionality to perform topological sorting of graphs. (Contributed by Pablo +Galindo, Tim Peters and Larry Hastings in :issue:`17005`.) + + Improved Modules ================ @@ -352,13 +360,6 @@ ftplib if the given timeout for their constructor is zero to prevent the creation of a non-blocking socket. (Contributed by Dong-hee Na in :issue:`39259`.) -functools ---------- - -Add the :class:`functools.TopologicalSorter` class to offer functionality to perform -topological sorting of graphs. (Contributed by Pablo Galindo, Tim Peters and Larry -Hastings in :issue:`17005`.) - gc -- diff --git a/Lib/functools.py b/Lib/functools.py index 87c7d87438998..5cab497d26403 100644 --- a/Lib/functools.py +++ b/Lib/functools.py @@ -11,7 +11,6 @@ __all__ = ['update_wrapper', 'wraps', 'WRAPPER_ASSIGNMENTS', 'WRAPPER_UPDATES', 'total_ordering', 'cache', 'cmp_to_key', 'lru_cache', 'reduce', - 'TopologicalSorter', 'CycleError', 'partial', 'partialmethod', 'singledispatch', 'singledispatchmethod', 'cached_property'] @@ -199,250 +198,6 @@ def total_ordering(cls): setattr(cls, opname, opfunc) return cls -################################################################################ -### topological sort -################################################################################ - -_NODE_OUT = -1 -_NODE_DONE = -2 - - -class _NodeInfo: - __slots__ = 'node', 'npredecessors', 'successors' - - def __init__(self, node): - # The node this class is augmenting. 
- self.node = node - - # Number of predecessors, generally >= 0. When this value falls to 0, - # and is returned by get_ready(), this is set to _NODE_OUT and when the - # node is marked done by a call to done(), set to _NODE_DONE. - self.npredecessors = 0 - - # List of successor nodes. The list can contain duplicated elements as - # long as they're all reflected in the successor's npredecessors attribute. - self.successors = [] - - -class CycleError(ValueError): - """Subclass of ValueError raised by TopologicalSorter if cycles exist in the graph - - If multiple cycles exist, only one undefined choice among them will be reported - and included in the exception. The detected cycle can be accessed via the second - element in the *args* attribute of the exception instance and consists in a list - of nodes, such that each node is, in the graph, an immediate predecessor of the - next node in the list. In the reported list, the first and the last node will be - the same, to make it clear that it is cyclic. - """ - pass - - -class TopologicalSorter: - """Provides functionality to topologically sort a graph of hashable nodes""" - - def __init__(self, graph=None): - self._node2info = {} - self._ready_nodes = None - self._npassedout = 0 - self._nfinished = 0 - - if graph is not None: - for node, predecessors in graph.items(): - self.add(node, *predecessors) - - def _get_nodeinfo(self, node): - if (result := self._node2info.get(node)) is None: - self._node2info[node] = result = _NodeInfo(node) - return result - - def add(self, node, *predecessors): - """Add a new node and its predecessors to the graph. - - Both the *node* and all elements in *predecessors* must be hashable. - - If called multiple times with the same node argument, the set of dependencies - will be the union of all dependencies passed in. - - It is possible to add a node with no dependencies (*predecessors* is not provided) - as well as provide a dependency twice. 
If a node that has not been provided before - is included among *predecessors* it will be automatically added to the graph with - no predecessors of its own. - - Raises ValueError if called after "prepare". - """ - if self._ready_nodes is not None: - raise ValueError("Nodes cannot be added after a call to prepare()") - - # Create the node -> predecessor edges - nodeinfo = self._get_nodeinfo(node) - nodeinfo.npredecessors += len(predecessors) - - # Create the predecessor -> node edges - for pred in predecessors: - pred_info = self._get_nodeinfo(pred) - pred_info.successors.append(node) - - def prepare(self): - """Mark the graph as finished and check for cycles in the graph. - - If any cycle is detected, "CycleError" will be raised, but "get_ready" can - still be used to obtain as many nodes as possible until cycles block more - progress. After a call to this function, the graph cannot be modified and - therefore no more nodes can be added using "add". - """ - if self._ready_nodes is not None: - raise ValueError("cannot prepare() more than once") - - self._ready_nodes = [i.node for i in self._node2info.values() - if i.npredecessors == 0] - # ready_nodes is set before we look for cycles on purpose: - # if the user wants to catch the CycleError, that's fine, - # they can continue using the instance to grab as many - # nodes as possible before cycles block more progress - cycle = self._find_cycle() - if cycle: - raise CycleError(f"nodes are in a cycle", cycle) - - def get_ready(self): - """Return a tuple of all the nodes that are ready. - - Initially it returns all nodes with no predecessors; once those are marked - as processed by calling "done", further calls will return all new nodes that - have all their predecessors already processed. Once no more progress can be made, - empty tuples are returned. - - Raises ValueError if called without calling "prepare" previously. 
- """ - if self._ready_nodes is None: - raise ValueError("prepare() must be called first") - - # Get the nodes that are ready and mark them - result = tuple(self._ready_nodes) - n2i = self._node2info - for node in result: - n2i[node].npredecessors = _NODE_OUT - - # Clean the list of nodes that are ready and update - # the counter of nodes that we have returned. - self._ready_nodes.clear() - self._npassedout += len(result) - - return result - - def is_active(self): - """Return True if more progress can be made and ``False`` otherwise. - - Progress can be made if cycles do not block the resolution and either there - are still nodes ready that haven't yet been returned by "get_ready" or the - number of nodes marked "done" is less than the number that have been returned - by "get_ready". - - Raises ValueError if called without calling "prepare" previously. - """ - if self._ready_nodes is None: - raise ValueError("prepare() must be called first") - return self._nfinished < self._npassedout or bool(self._ready_nodes) - - def __bool__(self): - return self.is_active() - - def done(self, *nodes): - """Marks a set of nodes returned by "get_ready" as processed. - - This method unblocks any successor of each node in *nodes* for being returned - in the future by a call to "get_ready" - - Raises :exc:`ValueError` if any node in *nodes* has already been marked as - processed by a previous call to this method, if a node was not added to the - graph by using "add" or if called without calling "prepare" previously or if - node has not yet been returned by "get_ready". - """ - - if self._ready_nodes is None: - raise ValueError("prepare() must be called first") - - n2i = self._node2info - - for node in nodes: - - # Check if we know about this node (it was added previously using add()) - if (nodeinfo := n2i.get(node)) is None: - raise ValueError(f"node {node!r} was not added using add()") - - # If the node has not been returned (marked as ready) previously, inform the user. 
- stat = nodeinfo.npredecessors - if stat != _NODE_OUT: - if stat >= 0: - raise ValueError(f"node {node!r} was not passed out (still not ready)") - elif stat == _NODE_DONE: - raise ValueError(f"node {node!r} was already marked done") - else: - assert False, f"node {node!r}: unknown status {stat}" - - # Mark the node as processed - nodeinfo.npredecessors = _NODE_DONE - - # Go to all the successors and reduce the number of predecessors, collecting all the ones - # that are ready to be returned in the next get_ready() call. - for successor in nodeinfo.successors: - successor_info = n2i[successor] - successor_info.npredecessors -= 1 - if successor_info.npredecessors == 0: - self._ready_nodes.append(successor) - self._nfinished += 1 - - def _find_cycle(self): - n2i = self._node2info - stack = [] - itstack = [] - seen = set() - node2stacki = {} - - for node in n2i: - if node in seen: - continue - - while True: - if node in seen: - # If we have seen already the node and is in the - # current stack we have found a cycle. - if node in node2stacki: - return stack[node2stacki[node]:] + [node] - # else go on to get next successor - else: - seen.add(node) - itstack.append(iter(n2i[node].successors).__next__) - node2stacki[node] = len(stack) - stack.append(node) - - # Backtrack to the topmost stack entry with - # at least another successor. - while stack: - try: - node = itstack[-1]() - break - except StopIteration: - del node2stacki[stack.pop()] - itstack.pop() - else: - break - return None - - def static_order(self): - """Returns an iterable of nodes in a topological order. - - The particular order that is returned may depend on the specific - order in which the items were inserted in the graph. - - Using this method does not require to call "prepare" or "done". If any - cycle is detected, :exc:`CycleError` will be raised. 
- """ - self.prepare() - while self.is_active(): - node_group = self.get_ready() - yield from node_group - self.done(*node_group) - ################################################################################ ### cmp_to_key() function converter diff --git a/Lib/graphlib.py b/Lib/graphlib.py new file mode 100644 index 0000000000000..948f62f1dc303 --- /dev/null +++ b/Lib/graphlib.py @@ -0,0 +1,245 @@ +__all__ = ["TopologicalSorter", "CycleError"] + +_NODE_OUT = -1 +_NODE_DONE = -2 + + +class _NodeInfo: + __slots__ = "node", "npredecessors", "successors" + + def __init__(self, node): + # The node this class is augmenting. + self.node = node + + # Number of predecessors, generally >= 0. When this value falls to 0, + # and is returned by get_ready(), this is set to _NODE_OUT and when the + # node is marked done by a call to done(), set to _NODE_DONE. + self.npredecessors = 0 + + # List of successor nodes. The list can contain duplicated elements as + # long as they're all reflected in the successor's npredecessors attribute). + self.successors = [] + + +class CycleError(ValueError): + """Subclass of ValueError raised by TopologicalSorterif cycles exist in the graph + + If multiple cycles exist, only one undefined choice among them will be reported + and included in the exception. The detected cycle can be accessed via the second + element in the *args* attribute of the exception instance and consists in a list + of nodes, such that each node is, in the graph, an immediate predecessor of the + next node in the list. In the reported list, the first and the last node will be + the same, to make it clear that it is cyclic. 
+ """ + + pass + + +class TopologicalSorter: + """Provides functionality to topologically sort a graph of hashable nodes""" + + def __init__(self, graph=None): + self._node2info = {} + self._ready_nodes = None + self._npassedout = 0 + self._nfinished = 0 + + if graph is not None: + for node, predecessors in graph.items(): + self.add(node, *predecessors) + + def _get_nodeinfo(self, node): + if (result := self._node2info.get(node)) is None: + self._node2info[node] = result = _NodeInfo(node) + return result + + def add(self, node, *predecessors): + """Add a new node and its predecessors to the graph. + + Both the *node* and all elements in *predecessors* must be hashable. + + If called multiple times with the same node argument, the set of dependencies + will be the union of all dependencies passed in. + + It is possible to add a node with no dependencies (*predecessors* is not provided) + as well as provide a dependency twice. If a node that has not been provided before + is included among *predecessors* it will be automatically added to the graph with + no predecessors of its own. + + Raises ValueError if called after "prepare". + """ + if self._ready_nodes is not None: + raise ValueError("Nodes cannot be added after a call to prepare()") + + # Create the node -> predecessor edges + nodeinfo = self._get_nodeinfo(node) + nodeinfo.npredecessors += len(predecessors) + + # Create the predecessor -> node edges + for pred in predecessors: + pred_info = self._get_nodeinfo(pred) + pred_info.successors.append(node) + + def prepare(self): + """Mark the graph as finished and check for cycles in the graph. + + If any cycle is detected, "CycleError" will be raised, but "get_ready" can + still be used to obtain as many nodes as possible until cycles block more + progress. After a call to this function, the graph cannot be modified and + therefore no more nodes can be added using "add". 
+ """ + if self._ready_nodes is not None: + raise ValueError("cannot prepare() more than once") + + self._ready_nodes = [ + i.node for i in self._node2info.values() if i.npredecessors == 0 + ] + # ready_nodes is set before we look for cycles on purpose: + # if the user wants to catch the CycleError, that's fine, + # they can continue using the instance to grab as many + # nodes as possible before cycles block more progress + cycle = self._find_cycle() + if cycle: + raise CycleError(f"nodes are in a cycle", cycle) + + def get_ready(self): + """Return a tuple of all the nodes that are ready. + + Initially it returns all nodes with no predecessors; once those are marked + as processed by calling "done", further calls will return all new nodes that + have all their predecessors already processed. Once no more progress can be made, + empty tuples are returned. + + Raises ValueError if called without calling "prepare" previously. + """ + if self._ready_nodes is None: + raise ValueError("prepare() must be called first") + + # Get the nodes that are ready and mark them + result = tuple(self._ready_nodes) + n2i = self._node2info + for node in result: + n2i[node].npredecessors = _NODE_OUT + + # Clean the list of nodes that are ready and update + # the counter of nodes that we have returned. + self._ready_nodes.clear() + self._npassedout += len(result) + + return result + + def is_active(self): + """Return True if more progress can be made and ``False`` otherwise. + + Progress can be made if cycles do not block the resolution and either there + are still nodes ready that haven't yet been returned by "get_ready" or the + number of nodes marked "done" is less than the number that have been returned + by "get_ready". + + Raises ValueError if called without calling "prepare" previously. 
+ """ + if self._ready_nodes is None: + raise ValueError("prepare() must be called first") + return self._nfinished < self._npassedout or bool(self._ready_nodes) + + def __bool__(self): + return self.is_active() + + def done(self, *nodes): + """Marks a set of nodes returned by "get_ready" as processed. + + This method unblocks any successor of each node in *nodes* for being returned + in the future by a a call to "get_ready" + + Raises :exec:`ValueError` if any node in *nodes* has already been marked as + processed by a previous call to this method, if a node was not added to the + graph by using "add" or if called without calling "prepare" previously or if + node has not yet been returned by "get_ready". + """ + + if self._ready_nodes is None: + raise ValueError("prepare() must be called first") + + n2i = self._node2info + + for node in nodes: + + # Check if we know about this node (it was added previously using add() + if (nodeinfo := n2i.get(node)) is None: + raise ValueError(f"node {node!r} was not added using add()") + + # If the node has not being returned (marked as ready) previously, inform the user. + stat = nodeinfo.npredecessors + if stat != _NODE_OUT: + if stat >= 0: + raise ValueError( + f"node {node!r} was not passed out (still not ready)" + ) + elif stat == _NODE_DONE: + raise ValueError(f"node {node!r} was already marked done") + else: + assert False, f"node {node!r}: unknown status {stat}" + + # Mark the node as processed + nodeinfo.npredecessors = _NODE_DONE + + # Go to all the successors and reduce the number of predecessors, collecting all the ones + # that are ready to be returned in the next get_ready() call. 
+ for successor in nodeinfo.successors: + successor_info = n2i[successor] + successor_info.npredecessors -= 1 + if successor_info.npredecessors == 0: + self._ready_nodes.append(successor) + self._nfinished += 1 + + def _find_cycle(self): + n2i = self._node2info + stack = [] + itstack = [] + seen = set() + node2stacki = {} + + for node in n2i: + if node in seen: + continue + + while True: + if node in seen: + # If we have seen already the node and is in the + # current stack we have found a cycle. + if node in node2stacki: + return stack[node2stacki[node] :] + [node] + # else go on to get next successor + else: + seen.add(node) + itstack.append(iter(n2i[node].successors).__next__) + node2stacki[node] = len(stack) + stack.append(node) + + # Backtrack to the topmost stack entry with + # at least another successor. + while stack: + try: + node = itstack[-1]() + break + except StopIteration: + del node2stacki[stack.pop()] + itstack.pop() + else: + break + return None + + def static_order(self): + """Returns an iterable of nodes in a topological order. + + The particular order that is returned may depend on the specific + order in which the items were inserted in the graph. + + Using this method does not require to call "prepare" or "done". If any + cycle is detected, :exc:`CycleError` will be raised. 
+ """ + self.prepare() + while self.is_active(): + node_group = self.get_ready() + yield from node_group + self.done(*node_group) diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py index 72b7765853bc0..e726188982bc4 100644 --- a/Lib/test/test_functools.py +++ b/Lib/test/test_functools.py @@ -3,7 +3,7 @@ import collections import collections.abc import copy -from itertools import permutations, chain +from itertools import permutations import pickle from random import choice import sys @@ -1164,275 +1164,6 @@ def __eq__(self, other): return self.value == other.value -class TestTopologicalSort(unittest.TestCase): - - def _test_graph(self, graph, expected): - - def static_order_with_groups(ts): - ts.prepare() - while ts.is_active(): - nodes = ts.get_ready() - for node in nodes: - ts.done(node) - yield nodes - - ts = functools.TopologicalSorter(graph) - self.assertEqual(list(static_order_with_groups(ts)), list(expected)) - - ts = functools.TopologicalSorter(graph) - self.assertEqual(list(ts.static_order()), list(chain(*expected))) - - def _assert_cycle(self, graph, cycle): - ts = functools.TopologicalSorter() - for node, dependson in graph.items(): - ts.add(node, *dependson) - try: - ts.prepare() - except functools.CycleError as e: - msg, seq = e.args - self.assertIn(' '.join(map(str, cycle)), - ' '.join(map(str, seq * 2))) - else: - raise - - def test_simple_cases(self): - self._test_graph( - {2: {11}, - 9: {11, 8}, - 10: {11, 3}, - 11: {7, 5}, - 8: {7, 3}}, - [(3, 5, 7), (11, 8), (2, 10, 9)] - ) - - self._test_graph({1: {}}, [(1,)]) - - self._test_graph({x: {x+1} for x in range(10)}, - [(x,) for x in range(10, -1, -1)]) - - self._test_graph({2: {3}, 3: {4}, 4: {5}, 5: {1}, - 11: {12}, 12: {13}, 13: {14}, 14: {15}}, - [(1, 15), (5, 14), (4, 13), (3, 12), (2, 11)]) - - self._test_graph({ - 0: [1, 2], - 1: [3], - 2: [5, 6], - 3: [4], - 4: [9], - 5: [3], - 6: [7], - 7: [8], - 8: [4], - 9: [] - }, - [(9,), (4,), (3, 8), (1, 5, 7), (6,), (2,), (0,)] - ) - 
- self._test_graph({ - 0: [1, 2], - 1: [], - 2: [3], - 3: [] - }, - [(1, 3), (2,), (0,)] - ) - - self._test_graph({ - 0: [1, 2], - 1: [], - 2: [3], - 3: [], - 4: [5], - 5: [6], - 6: [] - }, - [(1, 3, 6), (2, 5), (0, 4)] - ) - - def test_no_dependencies(self): - self._test_graph( - {1: {2}, - 3: {4}, - 5: {6}}, - [(2, 4, 6), (1, 3, 5)] - ) - - self._test_graph( - {1: set(), - 3: set(), - 5: set()}, - [(1, 3, 5)] - ) - - def test_the_node_multiple_times(self): - # Test same node multiple times in dependencies - self._test_graph({1: {2}, 3: {4}, 0: [2, 4, 4, 4, 4, 4]}, - [(2, 4), (1, 3, 0)]) - - # Test adding the same dependency multiple times - ts = functools.TopologicalSorter() - ts.add(1, 2) - ts.add(1, 2) - ts.add(1, 2) - self.assertEqual([*ts.static_order()], [2, 1]) - - def test_graph_with_iterables(self): - dependson = (2*x + 1 for x in range(5)) - ts = functools.TopologicalSorter({0: dependson}) - self.assertEqual(list(ts.static_order()), [1, 3, 5, 7, 9, 0]) - - def test_add_dependencies_for_same_node_incrementally(self): - # Test same node multiple times - ts = functools.TopologicalSorter() - ts.add(1, 2) - ts.add(1, 3) - ts.add(1, 4) - ts.add(1, 5) - - ts2 = functools.TopologicalSorter({1: {2, 3, 4, 5}}) - self.assertEqual([*ts.static_order()], [*ts2.static_order()]) - - def test_empty(self): - self._test_graph({}, []) - - def test_cycle(self): - # Self cycle - self._assert_cycle({1: {1}}, [1, 1]) - # Simple cycle - self._assert_cycle({1: {2}, 2: {1}}, [1, 2, 1]) - # Indirect cycle - self._assert_cycle({1: {2}, 2: {3}, 3: {1}}, [1, 3, 2, 1]) - # not all elements involved in a cycle - self._assert_cycle({1: {2}, 2: {3}, 3: {1}, 5: {4}, 4: {6}}, [1, 3, 2, 1]) - # Multiple cycles - self._assert_cycle({1: {2}, 2: {1}, 3: {4}, 4: {5}, 6: {7}, 7: {6}}, - [1, 2, 1]) - # Cycle in the middle of the graph - self._assert_cycle({1: {2}, 2: {3}, 3: {2, 4}, 4: {5}}, [3, 2]) - - def test_calls_before_prepare(self): - ts = functools.TopologicalSorter() - - with 
self.assertRaisesRegex(ValueError, r"prepare\(\) must be called first"): - ts.get_ready() - with self.assertRaisesRegex(ValueError, r"prepare\(\) must be called first"): - ts.done(3) - with self.assertRaisesRegex(ValueError, r"prepare\(\) must be called first"): - ts.is_active() - - def test_prepare_multiple_times(self): - ts = functools.TopologicalSorter() - ts.prepare() - with self.assertRaisesRegex(ValueError, r"cannot prepare\(\) more than once"): - ts.prepare() - - def test_invalid_nodes_in_done(self): - ts = functools.TopologicalSorter() - ts.add(1, 2, 3, 4) - ts.add(2, 3, 4) - ts.prepare() - ts.get_ready() - - with self.assertRaisesRegex(ValueError, "node 2 was not passed out"): - ts.done(2) - with self.assertRaisesRegex(ValueError, r"node 24 was not added using add\(\)"): - ts.done(24) - - def test_done(self): - ts = functools.TopologicalSorter() - ts.add(1, 2, 3, 4) - ts.add(2, 3) - ts.prepare() - - self.assertEqual(ts.get_ready(), (3, 4)) - # If we don't mark anything as done, get_ready() returns nothing - self.assertEqual(ts.get_ready(), ()) - ts.done(3) - # Now 2 becomes available as 3 is done - self.assertEqual(ts.get_ready(), (2,)) - self.assertEqual(ts.get_ready(), ()) - ts.done(4) - ts.done(2) - # Only 1 is missing - self.assertEqual(ts.get_ready(), (1,)) - self.assertEqual(ts.get_ready(), ()) - ts.done(1) - self.assertEqual(ts.get_ready(), ()) - self.assertFalse(ts.is_active()) - - def test_is_active(self): - ts = functools.TopologicalSorter() - ts.add(1, 2) - ts.prepare() - - self.assertTrue(ts.is_active()) - self.assertEqual(ts.get_ready(), (2,)) - self.assertTrue(ts.is_active()) - ts.done(2) - self.assertTrue(ts.is_active()) - self.assertEqual(ts.get_ready(), (1,)) - self.assertTrue(ts.is_active()) - ts.done(1) - self.assertFalse(ts.is_active()) - - def test_not_hashable_nodes(self): - ts = functools.TopologicalSorter() - self.assertRaises(TypeError, ts.add, dict(), 1) - self.assertRaises(TypeError, ts.add, 1, dict()) - 
self.assertRaises(TypeError, ts.add, dict(), dict()) - - def test_order_of_insertion_does_not_matter_between_groups(self): - def get_groups(ts): - ts.prepare() - while ts.is_active(): - nodes = ts.get_ready() - ts.done(*nodes) - yield set(nodes) - - ts = functools.TopologicalSorter() - ts.add(3, 2, 1) - ts.add(1, 0) - ts.add(4, 5) - ts.add(6, 7) - ts.add(4, 7) - - ts2 = functools.TopologicalSorter() - ts2.add(1, 0) - ts2.add(3, 2, 1) - ts2.add(4, 7) - ts2.add(6, 7) - ts2.add(4, 5) - - self.assertEqual(list(get_groups(ts)), list(get_groups(ts2))) - - def test_static_order_does_not_change_with_the_hash_seed(self): - def check_order_with_hash_seed(seed): - code = """if 1: - import functools - ts = functools.TopologicalSorter() - ts.add('blech', 'bluch', 'hola') - ts.add('abcd', 'blech', 'bluch', 'a', 'b') - ts.add('a', 'a string', 'something', 'b') - ts.add('bluch', 'hola', 'abcde', 'a', 'b') - print(list(ts.static_order())) - """ - env = os.environ.copy() - # signal to assert_python not to do a copy - # of os.environ on its own - env['__cleanenv'] = True - env['PYTHONHASHSEED'] = str(seed) - out = assert_python_ok('-c', code, **env) - return out - - run1 = check_order_with_hash_seed(1234) - run2 = check_order_with_hash_seed(31415) - - self.assertNotEqual(run1, "") - self.assertNotEqual(run2, "") - self.assertEqual(run1, run2) - - class TestCache: # This tests that the pass-through is working as designed. # The underlying functionality is tested in TestLRU. 
diff --git a/Lib/test/test_graphlib.py b/Lib/test/test_graphlib.py new file mode 100644 index 0000000000000..00432537f22d0 --- /dev/null +++ b/Lib/test/test_graphlib.py @@ -0,0 +1,244 @@ +from itertools import chain +import graphlib +import os +import unittest + +from test.support.script_helper import assert_python_ok + +class TestTopologicalSort(unittest.TestCase): + def _test_graph(self, graph, expected): + def static_order_with_groups(ts): + ts.prepare() + while ts.is_active(): + nodes = ts.get_ready() + for node in nodes: + ts.done(node) + yield nodes + + ts = graphlib.TopologicalSorter(graph) + self.assertEqual(list(static_order_with_groups(ts)), list(expected)) + + ts = graphlib.TopologicalSorter(graph) + self.assertEqual(list(ts.static_order()), list(chain(*expected))) + + def _assert_cycle(self, graph, cycle): + ts = graphlib.TopologicalSorter() + for node, dependson in graph.items(): + ts.add(node, *dependson) + try: + ts.prepare() + except graphlib.CycleError as e: + msg, seq = e.args + self.assertIn(" ".join(map(str, cycle)), " ".join(map(str, seq * 2))) + else: + raise + + def test_simple_cases(self): + self._test_graph( + {2: {11}, 9: {11, 8}, 10: {11, 3}, 11: {7, 5}, 8: {7, 3}}, + [(3, 5, 7), (11, 8), (2, 10, 9)], + ) + + self._test_graph({1: {}}, [(1,)]) + + self._test_graph( + {x: {x + 1} for x in range(10)}, [(x,) for x in range(10, -1, -1)] + ) + + self._test_graph( + {2: {3}, 3: {4}, 4: {5}, 5: {1}, 11: {12}, 12: {13}, 13: {14}, 14: {15}}, + [(1, 15), (5, 14), (4, 13), (3, 12), (2, 11)], + ) + + self._test_graph( + { + 0: [1, 2], + 1: [3], + 2: [5, 6], + 3: [4], + 4: [9], + 5: [3], + 6: [7], + 7: [8], + 8: [4], + 9: [], + }, + [(9,), (4,), (3, 8), (1, 5, 7), (6,), (2,), (0,)], + ) + + self._test_graph({0: [1, 2], 1: [], 2: [3], 3: []}, [(1, 3), (2,), (0,)]) + + self._test_graph( + {0: [1, 2], 1: [], 2: [3], 3: [], 4: [5], 5: [6], 6: []}, + [(1, 3, 6), (2, 5), (0, 4)], + ) + + def test_no_dependencies(self): + self._test_graph({1: {2}, 3: {4}, 5: 
{6}}, [(2, 4, 6), (1, 3, 5)]) + + self._test_graph({1: set(), 3: set(), 5: set()}, [(1, 3, 5)]) + + def test_the_node_multiple_times(self): + # Test same node multiple times in dependencies + self._test_graph({1: {2}, 3: {4}, 0: [2, 4, 4, 4, 4, 4]}, [(2, 4), (1, 3, 0)]) + + # Test adding the same dependency multiple times + ts = graphlib.TopologicalSorter() + ts.add(1, 2) + ts.add(1, 2) + ts.add(1, 2) + self.assertEqual([*ts.static_order()], [2, 1]) + + def test_graph_with_iterables(self): + dependson = (2 * x + 1 for x in range(5)) + ts = graphlib.TopologicalSorter({0: dependson}) + self.assertEqual(list(ts.static_order()), [1, 3, 5, 7, 9, 0]) + + def test_add_dependencies_for_same_node_incrementally(self): + # Test same node multiple times + ts = graphlib.TopologicalSorter() + ts.add(1, 2) + ts.add(1, 3) + ts.add(1, 4) + ts.add(1, 5) + + ts2 = graphlib.TopologicalSorter({1: {2, 3, 4, 5}}) + self.assertEqual([*ts.static_order()], [*ts2.static_order()]) + + def test_empty(self): + self._test_graph({}, []) + + def test_cycle(self): + # Self cycle + self._assert_cycle({1: {1}}, [1, 1]) + # Simple cycle + self._assert_cycle({1: {2}, 2: {1}}, [1, 2, 1]) + # Indirect cycle + self._assert_cycle({1: {2}, 2: {3}, 3: {1}}, [1, 3, 2, 1]) + # not all elements involved in a cycle + self._assert_cycle({1: {2}, 2: {3}, 3: {1}, 5: {4}, 4: {6}}, [1, 3, 2, 1]) + # Multiple cycles + self._assert_cycle({1: {2}, 2: {1}, 3: {4}, 4: {5}, 6: {7}, 7: {6}}, [1, 2, 1]) + # Cycle in the middle of the graph + self._assert_cycle({1: {2}, 2: {3}, 3: {2, 4}, 4: {5}}, [3, 2]) + + def test_calls_before_prepare(self): + ts = graphlib.TopologicalSorter() + + with self.assertRaisesRegex(ValueError, r"prepare\(\) must be called first"): + ts.get_ready() + with self.assertRaisesRegex(ValueError, r"prepare\(\) must be called first"): + ts.done(3) + with self.assertRaisesRegex(ValueError, r"prepare\(\) must be called first"): + ts.is_active() + + def test_prepare_multiple_times(self): + ts = 
graphlib.TopologicalSorter() + ts.prepare() + with self.assertRaisesRegex(ValueError, r"cannot prepare\(\) more than once"): + ts.prepare() + + def test_invalid_nodes_in_done(self): + ts = graphlib.TopologicalSorter() + ts.add(1, 2, 3, 4) + ts.add(2, 3, 4) + ts.prepare() + ts.get_ready() + + with self.assertRaisesRegex(ValueError, "node 2 was not passed out"): + ts.done(2) + with self.assertRaisesRegex(ValueError, r"node 24 was not added using add\(\)"): + ts.done(24) + + def test_done(self): + ts = graphlib.TopologicalSorter() + ts.add(1, 2, 3, 4) + ts.add(2, 3) + ts.prepare() + + self.assertEqual(ts.get_ready(), (3, 4)) + # If we don't mark anything as done, get_ready() returns nothing + self.assertEqual(ts.get_ready(), ()) + ts.done(3) + # Now 2 becomes available as 3 is done + self.assertEqual(ts.get_ready(), (2,)) + self.assertEqual(ts.get_ready(), ()) + ts.done(4) + ts.done(2) + # Only 1 is missing + self.assertEqual(ts.get_ready(), (1,)) + self.assertEqual(ts.get_ready(), ()) + ts.done(1) + self.assertEqual(ts.get_ready(), ()) + self.assertFalse(ts.is_active()) + + def test_is_active(self): + ts = graphlib.TopologicalSorter() + ts.add(1, 2) + ts.prepare() + + self.assertTrue(ts.is_active()) + self.assertEqual(ts.get_ready(), (2,)) + self.assertTrue(ts.is_active()) + ts.done(2) + self.assertTrue(ts.is_active()) + self.assertEqual(ts.get_ready(), (1,)) + self.assertTrue(ts.is_active()) + ts.done(1) + self.assertFalse(ts.is_active()) + + def test_not_hashable_nodes(self): + ts = graphlib.TopologicalSorter() + self.assertRaises(TypeError, ts.add, dict(), 1) + self.assertRaises(TypeError, ts.add, 1, dict()) + self.assertRaises(TypeError, ts.add, dict(), dict()) + + def test_order_of_insertion_does_not_matter_between_groups(self): + def get_groups(ts): + ts.prepare() + while ts.is_active(): + nodes = ts.get_ready() + ts.done(*nodes) + yield set(nodes) + + ts = graphlib.TopologicalSorter() + ts.add(3, 2, 1) + ts.add(1, 0) + ts.add(4, 5) + ts.add(6, 7) + ts.add(4, 
7) + + ts2 = graphlib.TopologicalSorter() + ts2.add(1, 0) + ts2.add(3, 2, 1) + ts2.add(4, 7) + ts2.add(6, 7) + ts2.add(4, 5) + + self.assertEqual(list(get_groups(ts)), list(get_groups(ts2))) + + def test_static_order_does_not_change_with_the_hash_seed(self): + def check_order_with_hash_seed(seed): + code = """if 1: + import graphlib + ts = graphlib.TopologicalSorter() + ts.add('blech', 'bluch', 'hola') + ts.add('abcd', 'blech', 'bluch', 'a', 'b') + ts.add('a', 'a string', 'something', 'b') + ts.add('bluch', 'hola', 'abcde', 'a', 'b') + print(list(ts.static_order())) + """ + env = os.environ.copy() + # signal to assert_python not to do a copy + # of os.environ on its own + env["__cleanenv"] = True + env["PYTHONHASHSEED"] = str(seed) + out = assert_python_ok("-c", code, **env) + return out + + run1 = check_order_with_hash_seed(1234) + run2 = check_order_with_hash_seed(31415) + + self.assertNotEqual(run1, "") + self.assertNotEqual(run2, "") + self.assertEqual(run1, run2) diff --git a/Misc/NEWS.d/next/Library/2020-05-31-23-32-36.bpo-17005.JlRUGB.rst b/Misc/NEWS.d/next/Library/2020-05-31-23-32-36.bpo-17005.JlRUGB.rst new file mode 100644 index 0000000000000..0fd01fb623093 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-31-23-32-36.bpo-17005.JlRUGB.rst @@ -0,0 +1,4 @@ +The topological sort functionality that was introduced initially in the +:mod:`functools` module has been moved to a new :mod:`graphlib` module to +better accommodate the new tools and keep the original scope of the +:mod:`functools` module. 
Patch by Pablo Galindo diff --git a/PCbuild/lib.pyproj b/PCbuild/lib.pyproj index 7ce88e5690b45..f0c51edb9d1ca 100644 --- a/PCbuild/lib.pyproj +++ b/PCbuild/lib.pyproj @@ -419,6 +419,7 @@ + From webhook-mailer at python.org Fri May 1 08:49:40 2020 From: webhook-mailer at python.org (Furkan =?utf-8?q?=C3=96nder?=) Date: Fri, 01 May 2020 12:49:40 -0000 Subject: [Python-checkins] (no subject) Message-ID: To: python-checkins at python.org Subject: bpo-40462: fix variable and function names (GH-19832) Content-Type: text/plain; charset="utf-8" Content-Transfer-Encoding: quoted-printable MIME-Version: 1.0 https://github.com/python/cpython/commit/719e14d2837520c18398a3e22a36f20c1fe7= 6edf commit: 719e14d2837520c18398a3e22a36f20c1fe76edf branch: master author: Furkan =C3=96nder committer: GitHub date: 2020-05-01T05:49:35-07:00 summary: bpo-40462: fix variable and function names (GH-19832) Automerge-Triggered-By: @vstinner files: M Lib/test/mock_socket.py M Lib/test/test_frame.py M Lib/unittest/test/test_program.py diff --git a/Lib/test/mock_socket.py b/Lib/test/mock_socket.py index b28c4732cc3c2..cda4db25cba59 100644 --- a/Lib/test/mock_socket.py +++ b/Lib/test/mock_socket.py @@ -91,7 +91,7 @@ def makefile(self, mode=3D'r', bufsize=3D-1): handle =3D MockFile(self.lines) return handle =20 - def sendall(self, buffer, flags=3DNone): + def sendall(self, data, flags=3DNone): self.last =3D data self.output.append(data) return len(data) diff --git a/Lib/test/test_frame.py b/Lib/test/test_frame.py index d6aa2834cbc28..a8696f011f945 100644 --- a/Lib/test/test_frame.py +++ b/Lib/test/test_frame.py @@ -50,7 +50,7 @@ def g(): nonlocal endly try: yield - inner() + self.inner() finally: endly =3D True gen =3D g() diff --git a/Lib/unittest/test/test_program.py b/Lib/unittest/test/test_progr= am.py index 4a62ae1b11306..eef82ff937ab7 100644 --- a/Lib/unittest/test/test_program.py +++ b/Lib/unittest/test/test_program.py @@ -188,8 +188,6 @@ def testBufferCatchFailfast(self): program =3D 
self.program for arg, attr in (('buffer', 'buffer'), ('failfast', 'failfast'), ('catch', 'catchbreak')): - if attr =3D=3D 'catch' and not hasInstallHandler: - continue =20 setattr(program, attr, None) program.parseArgs([None]) From webhook-mailer at python.org Mon May 4 15:02:09 2020 From: webhook-mailer at python.org (Miro =?utf-8?q?Hron=C4=8Dok?=) Date: Mon, 04 May 2020 19:02:09 -0000 Subject: [Python-checkins] (no subject) Message-ID: To: python-checkins at python.org Subject: bpo-40360: Add a What's New entry for lib2to3 pending deprecation (GH-19898) Content-Type: text/plain; charset="utf-8" Content-Transfer-Encoding: quoted-printable MIME-Version: 1.0 https://github.com/python/cpython/commit/18f1c60a1625d341a905c7e07367c32c08f2= 22df commit: 18f1c60a1625d341a905c7e07367c32c08f222df branch: master author: Miro Hron=C4=8Dok committer: GitHub date: 2020-05-04T12:02:00-07:00 summary: bpo-40360: Add a What's New entry for lib2to3 pending deprecation (GH-19898) files: M Doc/whatsnew/3.9.rst diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index cefaf5715d414..11e577baa8fb5 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -608,6 +608,16 @@ Deprecated * Passing ``None`` as the first argument to the :func:`shlex.split` function has been deprecated. (Contributed by Zackery Spytz in :issue:`33262`.) =20 +* The :mod:`lib2to3` module now emits a :exc:`PendingDeprecationWarning`. + Python 3.9 switched to a PEG parser (see :pep:`617`), and Python 3.10 may + include new language syntax that is not parsable by lib2to3's LL(1) parser. + The ``lib2to3`` module may be removed from the standard library in a future + Python version. Consider third-party alternatives such as `LibCST`_ or + `parso`_. + (Contributed by Carl Meyer in :issue:`40360`.) + +.. _LibCST: https://libcst.readthedocs.io/ +.. 
_parso: https://parso.readthedocs.io/ =20 Removed =3D=3D=3D=3D=3D=3D=3D From webhook-mailer at python.org Wed May 13 17:43:35 2020 From: webhook-mailer at python.org (=?utf-8?q?=C5=81ukasz?= Langa) Date: Wed, 13 May 2020 21:43:35 -0000 Subject: [Python-checkins] (no subject) Message-ID: To: python-checkins at python.org Subject: Python 3.8.3 Content-Type: text/plain; charset="utf-8" Content-Transfer-Encoding: quoted-printable MIME-Version: 1.0 https://github.com/python/cpython/commit/6f8c8320e9eac9bc7a7f653b43506e75916c= e8e8 commit: 6f8c8320e9eac9bc7a7f653b43506e75916ce8e8 branch: 3.8 author: =C5=81ukasz Langa committer: =C5=81ukasz Langa date: 2020-05-13T19:31:54+02:00 summary: Python 3.8.3 files: A Misc/NEWS.d/3.8.3.rst D Misc/NEWS.d/next/C API/2020-05-01-17-28-04.bpo-40412.dE0D8N.rst D Misc/NEWS.d/next/Core and Builtins/2020-05-01-14-58-16.bpo-39562.KCsX8n.rst D Misc/NEWS.d/next/Core and Builtins/2020-05-01-19-04-52.bpo-40417.Sti2lJ.rst D Misc/NEWS.d/next/Core and Builtins/2020-05-06-14-52-35.bpo-40527.gTNKuy.rst D Misc/NEWS.d/next/Documentation/2020-01-24-05-42-57.bpo-39435.EFcdFU.rst D Misc/NEWS.d/next/Documentation/2020-05-08-08-39-40.bpo-40561.ZMB_2i.rst D Misc/NEWS.d/next/Library/2020-04-26-22-25-36.bpo-40398.OdXnR3.rst D Misc/NEWS.d/next/Library/2020-05-02-04-29-31.bpo-40459.fSAYVD.rst D Misc/NEWS.d/next/Library/2020-05-02-14-24-48.bpo-40355.xTujaB.rst D Misc/NEWS.d/next/Library/2020-05-05-08-12-51.bpo-40559.112wwa.rst D Misc/NEWS.d/next/Windows/2020-05-01-20-57-57.bpo-40458.Eb0ueI.rst M Include/patchlevel.h M Lib/pydoc_data/topics.py M README.rst diff --git a/Include/patchlevel.h b/Include/patchlevel.h index f3c412bf8dd8e..2f6a68fbe0ad8 100644 --- a/Include/patchlevel.h +++ b/Include/patchlevel.h @@ -19,11 +19,11 @@ #define PY_MAJOR_VERSION 3 #define PY_MINOR_VERSION 8 #define PY_MICRO_VERSION 3 -#define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_GAMMA -#define PY_RELEASE_SERIAL 1 +#define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_FINAL +#define PY_RELEASE_SERIAL 0 =20 
/* Version as a string */ -#define PY_VERSION "3.8.3rc1+" +#define PY_VERSION "3.8.3" /*--end constants--*/ =20 /* Version as a single 4-byte hex number, e.g. 0x010502B2 =3D=3D 1.5.2b2. diff --git a/Lib/pydoc_data/topics.py b/Lib/pydoc_data/topics.py index ba068f0b2b9f7..06f0e781772f8 100644 --- a/Lib/pydoc_data/topics.py +++ b/Lib/pydoc_data/topics.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Autogenerated by Sphinx on Wed Apr 29 19:18:01 2020 +# Autogenerated by Sphinx on Wed May 13 19:29:27 2020 topics =3D {'assert': 'The "assert" statement\n' '**********************\n' '\n' diff --git a/Misc/NEWS.d/3.8.3.rst b/Misc/NEWS.d/3.8.3.rst new file mode 100644 index 0000000000000..39a4417409df5 --- /dev/null +++ b/Misc/NEWS.d/3.8.3.rst @@ -0,0 +1,107 @@ +.. bpo: 40527 +.. date: 2020-05-06-14-52-35 +.. nonce: gTNKuy +.. release date: 2020-05-13 +.. section: Core and Builtins + +Fix command line argument parsing: no longer write errors multiple times +into stderr. + +.. + +.. bpo: 40417 +.. date: 2020-05-01-19-04-52 +.. nonce: Sti2lJ +.. section: Core and Builtins + +Fix imp module deprecation warning when PyImport_ReloadModule is called. +Patch by Robert Rouhani. + +.. + +.. bpo: 39562 +.. date: 2020-05-01-14-58-16 +.. nonce: KCsX8n +.. section: Core and Builtins + +The constant values of future flags in the :mod:`__future__` module are +updated in order to prevent collision with compiler flags. Previously +``PyCF_ALLOW_TOP_LEVEL_AWAIT`` was clashing with ``CO_FUTURE_DIVISION``. + +.. + +.. bpo: 40559 +.. date: 2020-05-05-08-12-51 +.. nonce: 112wwa +.. section: Library + +Fix possible memory leak in the C implementation of :class:`asyncio.Task`. + +.. + +.. bpo: 40355 +.. date: 2020-05-02-14-24-48 +.. nonce: xTujaB +.. section: Library + +Improve error reporting in :func:`ast.literal_eval` in the presence of +malformed :class:`ast.Dict` nodes instead of silently ignoring any +non-conforming elements. Patch by Curtis Bucher. + +.. + +.. bpo: 40459 +.. 
date: 2020-05-02-04-29-31 +.. nonce: fSAYVD +.. section: Library + +:func:`platform.win32_ver` now produces correct *ptype* strings instead of +empty strings. + +.. + +.. bpo: 40398 +.. date: 2020-04-26-22-25-36 +.. nonce: OdXnR3 +.. section: Library + +:func:`typing.get_args` now always returns an empty tuple for special +generic aliases. + +.. + +.. bpo: 40561 +.. date: 2020-05-08-08-39-40 +.. nonce: ZMB_2i +.. section: Documentation + +Provide docstrings for webbrowser open functions. + +.. + +.. bpo: 39435 +.. date: 2020-01-24-05-42-57 +.. nonce: EFcdFU +.. section: Documentation + +Fix an incorrect signature for :func:`pickle.loads` in the docs + +.. + +.. bpo: 40458 +.. date: 2020-05-01-20-57-57 +.. nonce: Eb0ueI +.. section: Windows + +Increase reserved stack space to prevent overflow crash on Windows. + +.. + +.. bpo: 40412 +.. date: 2020-05-01-17-28-04 +.. nonce: dE0D8N +.. section: C API + +Nullify inittab_copy during finalization, preventing future interpreter +initializations in an embedded situation from crashing. Patch by Gregory +Szorc. diff --git a/Misc/NEWS.d/next/C API/2020-05-01-17-28-04.bpo-40412.dE0D8N.rst = b/Misc/NEWS.d/next/C API/2020-05-01-17-28-04.bpo-40412.dE0D8N.rst deleted file mode 100644 index 92bfcddf115a6..0000000000000 --- a/Misc/NEWS.d/next/C API/2020-05-01-17-28-04.bpo-40412.dE0D8N.rst=09 +++ /dev/null @@ -1 +0,0 @@ -Nullify inittab_copy during finalization, preventing future interpreter init= ializations in an embedded situation from crashing. Patch by Gregory Szorc. 
diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-01-14-58-16.bpo-39562= .KCsX8n.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-01-14-58-16.bpo-3956= 2.KCsX8n.rst deleted file mode 100644 index 5d7ef9606b449..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2020-05-01-14-58-16.bpo-39562.KCsX8n= .rst=09 +++ /dev/null @@ -1,3 +0,0 @@ -The constant values of future flags in the :mod:`__future__` module are -updated in order to prevent collision with compiler flags. Previously -``PyCF_ALLOW_TOP_LEVEL_AWAIT`` was clashing with ``CO_FUTURE_DIVISION``. diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-01-19-04-52.bpo-40417= .Sti2lJ.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-01-19-04-52.bpo-4041= 7.Sti2lJ.rst deleted file mode 100644 index 932e853a8933d..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2020-05-01-19-04-52.bpo-40417.Sti2lJ= .rst=09 +++ /dev/null @@ -1 +0,0 @@ -Fix imp module deprecation warning when PyImport_ReloadModule is called. Pat= ch by Robert Rouhani. diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-06-14-52-35.bpo-40527= .gTNKuy.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-06-14-52-35.bpo-4052= 7.gTNKuy.rst deleted file mode 100644 index 19b8888230c65..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2020-05-06-14-52-35.bpo-40527.gTNKuy= .rst=09 +++ /dev/null @@ -1,2 +0,0 @@ -Fix command line argument parsing: no longer write errors multiple times -into stderr. 
diff --git a/Misc/NEWS.d/next/Documentation/2020-01-24-05-42-57.bpo-39435.EFc= dFU.rst b/Misc/NEWS.d/next/Documentation/2020-01-24-05-42-57.bpo-39435.EFcdFU= .rst deleted file mode 100644 index 40294c10df00a..0000000000000 --- a/Misc/NEWS.d/next/Documentation/2020-01-24-05-42-57.bpo-39435.EFcdFU.rst +++ /dev/null @@ -1 +0,0 @@ -Fix an incorrect signature for :func:`pickle.loads` in the docs \ No newline at end of file diff --git a/Misc/NEWS.d/next/Documentation/2020-05-08-08-39-40.bpo-40561.ZMB= _2i.rst b/Misc/NEWS.d/next/Documentation/2020-05-08-08-39-40.bpo-40561.ZMB_2i= .rst deleted file mode 100644 index bda24719b12cb..0000000000000 --- a/Misc/NEWS.d/next/Documentation/2020-05-08-08-39-40.bpo-40561.ZMB_2i.rst +++ /dev/null @@ -1 +0,0 @@ -Provide docstrings for webbrowser open functions. diff --git a/Misc/NEWS.d/next/Library/2020-04-26-22-25-36.bpo-40398.OdXnR3.rs= t b/Misc/NEWS.d/next/Library/2020-04-26-22-25-36.bpo-40398.OdXnR3.rst deleted file mode 100644 index a56da0c109592..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-04-26-22-25-36.bpo-40398.OdXnR3.rst +++ /dev/null @@ -1,2 +0,0 @@ -:func:`typing.get_args` now always returns an empty tuple for special -generic aliases. diff --git a/Misc/NEWS.d/next/Library/2020-05-02-04-29-31.bpo-40459.fSAYVD.rs= t b/Misc/NEWS.d/next/Library/2020-05-02-04-29-31.bpo-40459.fSAYVD.rst deleted file mode 100644 index d4bf6987fa260..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-02-04-29-31.bpo-40459.fSAYVD.rst +++ /dev/null @@ -1 +0,0 @@ -:func:`platform.win32_ver` now produces correct *ptype* strings instead of e= mpty strings. 
diff --git a/Misc/NEWS.d/next/Library/2020-05-02-14-24-48.bpo-40355.xTujaB.rs= t b/Misc/NEWS.d/next/Library/2020-05-02-14-24-48.bpo-40355.xTujaB.rst deleted file mode 100644 index 81f9e937a2bff..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-02-14-24-48.bpo-40355.xTujaB.rst +++ /dev/null @@ -1,2 +0,0 @@ -Improve error reporting in :func:`ast.literal_eval` in the presence of malfo= rmed :class:`ast.Dict` -nodes instead of silently ignoring any non-conforming elements. Patch by Cur= tis Bucher. diff --git a/Misc/NEWS.d/next/Library/2020-05-05-08-12-51.bpo-40559.112wwa.rs= t b/Misc/NEWS.d/next/Library/2020-05-05-08-12-51.bpo-40559.112wwa.rst deleted file mode 100644 index 15846351f25bb..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-05-08-12-51.bpo-40559.112wwa.rst +++ /dev/null @@ -1 +0,0 @@ -Fix possible memory leak in the C implementation of :class:`asyncio.Task`. \ No newline at end of file diff --git a/Misc/NEWS.d/next/Windows/2020-05-01-20-57-57.bpo-40458.Eb0ueI.rs= t b/Misc/NEWS.d/next/Windows/2020-05-01-20-57-57.bpo-40458.Eb0ueI.rst deleted file mode 100644 index 4dc1ff480df87..0000000000000 --- a/Misc/NEWS.d/next/Windows/2020-05-01-20-57-57.bpo-40458.Eb0ueI.rst +++ /dev/null @@ -1 +0,0 @@ -Increase reserved stack space to prevent overflow crash on Windows. diff --git a/README.rst b/README.rst index 8bf81f0066f0f..ae71b671111de 100644 --- a/README.rst +++ b/README.rst @@ -1,5 +1,5 @@ -This is Python version 3.8.3rc1 -=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D= =3D=3D=3D=3D=3D=3D +This is Python version 3.8.3 +=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D= =3D=3D=3D =20 .. 
image:: https://travis-ci.org/python/cpython.svg?branch=3D3.8 :alt: CPython build status on Travis CI From webhook-mailer at python.org Thu May 14 10:17:31 2020 From: webhook-mailer at python.org (=?utf-8?q?Lum=C3=ADr?= 'Frenzy' Balhar) Date: Thu, 14 May 2020 14:17:31 -0000 Subject: [Python-checkins] (no subject) Message-ID: To: python-checkins at python.org Subject: bpo-40495: compileall option to hardlink duplicate pyc files (GH-19901) Content-Type: text/plain; charset="utf-8" Content-Transfer-Encoding: quoted-printable MIME-Version: 1.0 https://github.com/python/cpython/commit/e77d428856fbd339faee44ff47214eda5fb5= 1d57 commit: e77d428856fbd339faee44ff47214eda5fb51d57 branch: master author: Lum=C3=ADr 'Frenzy' Balhar committer: GitHub date: 2020-05-14T16:17:22+02:00 summary: bpo-40495: compileall option to hardlink duplicate pyc files (GH-19901) compileall is now able to use hardlinks to prevent duplicates in a case when .pyc files for different optimization levels have the same content. Co-authored-by: Miro Hron=C4=8Dok Co-authored-by: Victor Stinner files: A Misc/NEWS.d/next/Library/2020-05-04-11-20-49.bpo-40495.TyTc2O.rst M Doc/library/compileall.rst M Doc/whatsnew/3.9.rst M Lib/compileall.py M Lib/test/test_compileall.py M Misc/ACKS diff --git a/Doc/library/compileall.rst b/Doc/library/compileall.rst index b1ae9d60e8ae1..a511c7eda265b 100644 --- a/Doc/library/compileall.rst +++ b/Doc/library/compileall.rst @@ -113,6 +113,11 @@ compile Python sources. =20 Ignore symlinks pointing outside the given directory. =20 +.. cmdoption:: --hardlink-dupes + + If two ``.pyc`` files with different optimization level have + the same content, use hard links to consolidate duplicate files. + .. versionchanged:: 3.2 Added the ``-i``, ``-b`` and ``-h`` options. =20 @@ -125,7 +130,7 @@ compile Python sources. Added the ``--invalidation-mode`` option. =20 .. versionchanged:: 3.9 - Added the ``-s``, ``-p``, ``-e`` options. 
+ Added the ``-s``, ``-p``, ``-e`` and ``--hardlink-dupes`` options. Raised the default recursion limit from 10 to :py:func:`sys.getrecursionlimit()`. Added the possibility to specify the ``-o`` option multiple times. @@ -143,7 +148,7 @@ runtime. Public functions ---------------- =20 -.. function:: compile_dir(dir, maxlevels=3Dsys.getrecursionlimit(), ddir=3DN= one, force=3DFalse, rx=3DNone, quiet=3D0, legacy=3DFalse, optimize=3D-1, work= ers=3D1, invalidation_mode=3DNone, \*, stripdir=3DNone, prependdir=3DNone, li= mit_sl_dest=3DNone) +.. function:: compile_dir(dir, maxlevels=3Dsys.getrecursionlimit(), ddir=3DN= one, force=3DFalse, rx=3DNone, quiet=3D0, legacy=3DFalse, optimize=3D-1, work= ers=3D1, invalidation_mode=3DNone, \*, stripdir=3DNone, prependdir=3DNone, li= mit_sl_dest=3DNone, hardlink_dupes=3DFalse) =20 Recursively descend the directory tree named by *dir*, compiling all :fil= e:`.py` files along the way. Return a true value if all the files compiled succes= sfully, @@ -193,6 +198,9 @@ Public functions the ``-s``, ``-p`` and ``-e`` options described above. They may be specified as ``str``, ``bytes`` or :py:class:`os.PathLike`. =20 + If *hardlink_dupes* is true and two ``.pyc`` files with different optimiz= ation + level have the same content, use hard links to consolidate duplicate file= s. + .. versionchanged:: 3.2 Added the *legacy* and *optimize* parameter. =20 @@ -219,9 +227,9 @@ Public functions Setting *workers* to 0 now chooses the optimal number of cores. =20 .. versionchanged:: 3.9 - Added *stripdir*, *prependdir* and *limit_sl_dest* arguments. + Added *stripdir*, *prependdir*, *limit_sl_dest* and *hardlink_dupes* a= rguments. =20 -.. function:: compile_file(fullname, ddir=3DNone, force=3DFalse, rx=3DNone, = quiet=3D0, legacy=3DFalse, optimize=3D-1, invalidation_mode=3DNone, \*, strip= dir=3DNone, prependdir=3DNone, limit_sl_dest=3DNone) +.. 
function:: compile_file(fullname, ddir=3DNone, force=3DFalse, rx=3DNone, = quiet=3D0, legacy=3DFalse, optimize=3D-1, invalidation_mode=3DNone, \*, strip= dir=3DNone, prependdir=3DNone, limit_sl_dest=3DNone, hardlink_dupes=3DFalse) =20 Compile the file with path *fullname*. Return a true value if the file compiled successfully, and a false value otherwise. @@ -257,6 +265,9 @@ Public functions the ``-s``, ``-p`` and ``-e`` options described above. They may be specified as ``str``, ``bytes`` or :py:class:`os.PathLike`. =20 + If *hardlink_dupes* is true and two ``.pyc`` files with different optimiz= ation + level have the same content, use hard links to consolidate duplicate file= s. + .. versionadded:: 3.2 =20 .. versionchanged:: 3.5 @@ -273,7 +284,7 @@ Public functions The *invalidation_mode* parameter's default value is updated to None. =20 .. versionchanged:: 3.9 - Added *stripdir*, *prependdir* and *limit_sl_dest* arguments. + Added *stripdir*, *prependdir*, *limit_sl_dest* and *hardlink_dupes* a= rguments. =20 .. function:: compile_path(skip_curdir=3DTrue, maxlevels=3D0, force=3DFalse,= quiet=3D0, legacy=3DFalse, optimize=3D-1, invalidation_mode=3DNone) =20 diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index 2fec790fe3a63..fbad0fba20f4b 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -245,6 +245,16 @@ that schedules a shutdown for the default executor that = waits on the Added :class:`asyncio.PidfdChildWatcher`, a Linux-specific child watcher implementation that polls process file descriptors. (:issue:`38692`) =20 +compileall +---------- + +Added new possibility to use hardlinks for duplicated ``.pyc`` files: *hardl= ink_dupes* parameter and --hardlink-dupes command line option. +(Contributed by Lum=C3=ADr 'Frenzy' Balhar in :issue:`40495`.) + +Added new options for path manipulation in resulting ``.pyc`` files: *stripd= ir*, *prependdir*, *limit_sl_dest* parameters and -s, -p, -e command line opt= ions. 
+Added the possibility to specify the option for an optimization level multip= le times. +(Contributed by Lum=C3=ADr 'Frenzy' Balhar in :issue:`38112`.) + concurrent.futures ------------------ =20 diff --git a/Lib/compileall.py b/Lib/compileall.py index abe6cffce59c5..fe7f450c55e1c 100644 --- a/Lib/compileall.py +++ b/Lib/compileall.py @@ -15,6 +15,7 @@ import importlib.util import py_compile import struct +import filecmp =20 from functools import partial from pathlib import Path @@ -47,7 +48,7 @@ def _walk_dir(dir, maxlevels, quiet=3D0): def compile_dir(dir, maxlevels=3DNone, ddir=3DNone, force=3DFalse, rx=3DNone, quiet=3D0, legacy=3DFalse, optimize=3D-1, workers= =3D1, invalidation_mode=3DNone, *, stripdir=3DNone, - prependdir=3DNone, limit_sl_dest=3DNone): + prependdir=3DNone, limit_sl_dest=3DNone, hardlink_dupes=3DFa= lse): """Byte-compile all modules in the given directory tree. =20 Arguments (only dir is required): @@ -70,6 +71,7 @@ def compile_dir(dir, maxlevels=3DNone, ddir=3DNone, force= =3DFalse, after stripdir limit_sl_dest: ignore symlinks if they are pointing outside of the defined path + hardlink_dupes: hardlink duplicated pyc files """ ProcessPoolExecutor =3D None if ddir is not None and (stripdir is not None or prependdir is not None): @@ -104,7 +106,8 @@ def compile_dir(dir, maxlevels=3DNone, ddir=3DNone, force= =3DFalse, invalidation_mode=3Dinvalidation_= mode, stripdir=3Dstripdir, prependdir=3Dprependdir, - limit_sl_dest=3Dlimit_sl_dest), + limit_sl_dest=3Dlimit_sl_dest, + hardlink_dupes=3Dhardlink_dupes), files) success =3D min(results, default=3DTrue) else: @@ -112,14 +115,15 @@ def compile_dir(dir, maxlevels=3DNone, ddir=3DNone, for= ce=3DFalse, if not compile_file(file, ddir, force, rx, quiet, legacy, optimize, invalidation_mode, stripdir=3Dstripdir, prependdir=3Dprependdir, - limit_sl_dest=3Dlimit_sl_dest): + limit_sl_dest=3Dlimit_sl_dest, + hardlink_dupes=3Dhardlink_dupes): success =3D False return success =20 def compile_file(fullname, 
ddir=3DNone, force=3DFalse, rx=3DNone, quiet=3D0, legacy=3DFalse, optimize=3D-1, invalidation_mode=3DNone, *, stripdir=3DNone, prependdir=3D= None, - limit_sl_dest=3DNone): + limit_sl_dest=3DNone, hardlink_dupes=3DFalse): """Byte-compile one file. =20 Arguments (only fullname is required): @@ -140,6 +144,7 @@ def compile_file(fullname, ddir=3DNone, force=3DFalse, rx= =3DNone, quiet=3D0, after stripdir limit_sl_dest: ignore symlinks if they are pointing outside of the defined path. + hardlink_dupes: hardlink duplicated pyc files """ =20 if ddir is not None and (stripdir is not None or prependdir is not None): @@ -176,6 +181,14 @@ def compile_file(fullname, ddir=3DNone, force=3DFalse, r= x=3DNone, quiet=3D0, if isinstance(optimize, int): optimize =3D [optimize] =20 + # Use set() to remove duplicates. + # Use sorted() to create pyc files in a deterministic order. + optimize =3D sorted(set(optimize)) + + if hardlink_dupes and len(optimize) < 2: + raise ValueError("Hardlinking of duplicated bytecode makes sense " + "only for more than one optimization level") + if rx is not None: mo =3D rx.search(fullname) if mo: @@ -220,10 +233,16 @@ def compile_file(fullname, ddir=3DNone, force=3DFalse, = rx=3DNone, quiet=3D0, if not quiet: print('Compiling {!r}...'.format(fullname)) try: - for opt_level, cfile in opt_cfiles.items(): + for index, opt_level in enumerate(optimize): + cfile =3D opt_cfiles[opt_level] ok =3D py_compile.compile(fullname, cfile, dfile, True, optimize=3Dopt_level, invalidation_mode=3Dinvalidation= _mode) + if index > 0 and hardlink_dupes: + previous_cfile =3D opt_cfiles[optimize[index - 1]] + if filecmp.cmp(cfile, previous_cfile, shallow=3DFals= e): + os.unlink(cfile) + os.link(previous_cfile, cfile) except py_compile.PyCompileError as err: success =3D False if quiet >=3D 2: @@ -352,6 +371,9 @@ def main(): 'Python interpreter itself (specified by -O).'= )) parser.add_argument('-e', metavar=3D'DIR', dest=3D'limit_sl_dest', help=3D'Ignore symlinks pointing 
outsite of the DIR') + parser.add_argument('--hardlink-dupes', action=3D'store_true', + dest=3D'hardlink_dupes', + help=3D'Hardlink duplicated pyc files') =20 args =3D parser.parse_args() compile_dests =3D args.compile_dest @@ -371,6 +393,10 @@ def main(): if args.opt_levels is None: args.opt_levels =3D [-1] =20 + if len(args.opt_levels) =3D=3D 1 and args.hardlink_dupes: + parser.error(("Hardlinking of duplicated bytecode makes sense " + "only for more than one optimization level.")) + if args.ddir is not None and ( args.stripdir is not None or args.prependdir is not None ): @@ -404,7 +430,8 @@ def main(): stripdir=3Dargs.stripdir, prependdir=3Dargs.prependdir, optimize=3Dargs.opt_levels, - limit_sl_dest=3Dargs.limit_sl_dest): + limit_sl_dest=3Dargs.limit_sl_dest, + hardlink_dupes=3Dargs.hardlink_dupes= ): success =3D False else: if not compile_dir(dest, maxlevels, args.ddir, @@ -414,7 +441,8 @@ def main(): stripdir=3Dargs.stripdir, prependdir=3Dargs.prependdir, optimize=3Dargs.opt_levels, - limit_sl_dest=3Dargs.limit_sl_dest): + limit_sl_dest=3Dargs.limit_sl_dest, + hardlink_dupes=3Dargs.hardlink_dupes): success =3D False return success else: diff --git a/Lib/test/test_compileall.py b/Lib/test/test_compileall.py index 72678945089f2..b4061b79357b8 100644 --- a/Lib/test/test_compileall.py +++ b/Lib/test/test_compileall.py @@ -1,16 +1,19 @@ -import sys import compileall +import contextlib +import filecmp import importlib.util -import test.test_importlib.util +import io +import itertools import os import pathlib import py_compile import shutil import struct +import sys import tempfile +import test.test_importlib.util import time import unittest -import io =20 from unittest import mock, skipUnless try: @@ -26,6 +29,24 @@ from .test_py_compile import SourceDateEpochTestMeta =20 =20 +def get_pyc(script, opt): + if not opt: + # Replace None and 0 with '' + opt =3D '' + return importlib.util.cache_from_source(script, optimization=3Dopt) + + +def get_pycs(script): + return 
[get_pyc(script, opt) for opt in (0, 1, 2)] + + +def is_hardlink(filename1, filename2): + """Returns True if two files have the same inode (hardlink)""" + inode1 =3D os.stat(filename1).st_ino + inode2 =3D os.stat(filename2).st_ino + return inode1 =3D=3D inode2 + + class CompileallTestsBase: =20 def setUp(self): @@ -825,6 +846,32 @@ def test_ignore_symlink_destination(self): self.assertTrue(os.path.isfile(allowed_bc)) self.assertFalse(os.path.isfile(prohibited_bc)) =20 + def test_hardlink_bad_args(self): + # Bad arguments combination, hardlink deduplication make sense + # only for more than one optimization level + self.assertRunNotOK(self.directory, "-o 1", "--hardlink-dupes") + + def test_hardlink(self): + # 'a =3D 0' code produces the same bytecode for the 3 optimization + # levels. All three .pyc files must have the same inode (hardlinks). + # + # If deduplication is disabled, all pyc files must have different + # inodes. + for dedup in (True, False): + with tempfile.TemporaryDirectory() as path: + with self.subTest(dedup=3Ddedup): + script =3D script_helper.make_script(path, "script", "a = =3D 0") + pycs =3D get_pycs(script) + + args =3D ["-q", "-o 0", "-o 1", "-o 2"] + if dedup: + args.append("--hardlink-dupes") + self.assertRunOK(path, *args) + + self.assertEqual(is_hardlink(pycs[0], pycs[1]), dedup) + self.assertEqual(is_hardlink(pycs[1], pycs[2]), dedup) + self.assertEqual(is_hardlink(pycs[0], pycs[2]), dedup) + =20 class CommandLineTestsWithSourceEpoch(CommandLineTestsBase, unittest.TestCase, @@ -841,5 +888,176 @@ class CommandLineTestsNoSourceEpoch(CommandLineTestsBas= e, =20 =20 =20 +class HardlinkDedupTestsBase: + # Test hardlink_dupes parameter of compileall.compile_dir() + + def setUp(self): + self.path =3D None + + @contextlib.contextmanager + def temporary_directory(self): + with tempfile.TemporaryDirectory() as path: + self.path =3D path + yield path + self.path =3D None + + def make_script(self, code, name=3D"script"): + return 
script_helper.make_script(self.path, name, code) + + def compile_dir(self, *, dedup=3DTrue, optimize=3D(0, 1, 2), force=3DFal= se): + compileall.compile_dir(self.path, quiet=3DTrue, optimize=3Doptimize, + hardlink_dupes=3Ddedup, force=3Dforce) + + def test_bad_args(self): + # Bad arguments combination, hardlink deduplication make sense + # only for more than one optimization level + with self.temporary_directory(): + self.make_script("pass") + with self.assertRaises(ValueError): + compileall.compile_dir(self.path, quiet=3DTrue, optimize=3D0, + hardlink_dupes=3DTrue) + with self.assertRaises(ValueError): + # same optimization level specified twice: + # compile_dir() removes duplicates + compileall.compile_dir(self.path, quiet=3DTrue, optimize=3D[= 0, 0], + hardlink_dupes=3DTrue) + + def create_code(self, docstring=3DFalse, assertion=3DFalse): + lines =3D [] + if docstring: + lines.append("'module docstring'") + lines.append('x =3D 1') + if assertion: + lines.append("assert x =3D=3D 1") + return '\n'.join(lines) + + def iter_codes(self): + for docstring in (False, True): + for assertion in (False, True): + code =3D self.create_code(docstring=3Ddocstring, assertion= =3Dassertion) + yield (code, docstring, assertion) + + def test_disabled(self): + # Deduplication disabled, no hardlinks + for code, docstring, assertion in self.iter_codes(): + with self.subTest(docstring=3Ddocstring, assertion=3Dassertion): + with self.temporary_directory(): + script =3D self.make_script(code) + pycs =3D get_pycs(script) + self.compile_dir(dedup=3DFalse) + self.assertFalse(is_hardlink(pycs[0], pycs[1])) + self.assertFalse(is_hardlink(pycs[0], pycs[2])) + self.assertFalse(is_hardlink(pycs[1], pycs[2])) + + def check_hardlinks(self, script, docstring=3DFalse, assertion=3DFalse): + pycs =3D get_pycs(script) + self.assertEqual(is_hardlink(pycs[0], pycs[1]), + not assertion) + self.assertEqual(is_hardlink(pycs[0], pycs[2]), + not assertion and not docstring) + 
self.assertEqual(is_hardlink(pycs[1], pycs[2]), + not docstring) + + def test_hardlink(self): + # Test deduplication on all combinations + for code, docstring, assertion in self.iter_codes(): + with self.subTest(docstring=3Ddocstring, assertion=3Dassertion): + with self.temporary_directory(): + script =3D self.make_script(code) + self.compile_dir() + self.check_hardlinks(script, docstring, assertion) + + def test_only_two_levels(self): + # Don't build the 3 optimization levels, but only 2 + for opts in ((0, 1), (1, 2), (0, 2)): + with self.subTest(opts=3Dopts): + with self.temporary_directory(): + # code with no dostring and no assertion: + # same bytecode for all optimization levels + script =3D self.make_script(self.create_code()) + self.compile_dir(optimize=3Dopts) + pyc1 =3D get_pyc(script, opts[0]) + pyc2 =3D get_pyc(script, opts[1]) + self.assertTrue(is_hardlink(pyc1, pyc2)) + + def test_duplicated_levels(self): + # compile_dir() must not fail if optimize contains duplicated + # optimization levels and/or if optimization levels are not sorted. 
+ with self.temporary_directory(): + # code with no dostring and no assertion: + # same bytecode for all optimization levels + script =3D self.make_script(self.create_code()) + self.compile_dir(optimize=3D[1, 0, 1, 0]) + pyc1 =3D get_pyc(script, 0) + pyc2 =3D get_pyc(script, 1) + self.assertTrue(is_hardlink(pyc1, pyc2)) + + def test_recompilation(self): + # Test compile_dir() when pyc files already exists and the script + # content changed + with self.temporary_directory(): + script =3D self.make_script("a =3D 0") + self.compile_dir() + # All three levels have the same inode + self.check_hardlinks(script) + + pycs =3D get_pycs(script) + inode =3D os.stat(pycs[0]).st_ino + + # Change of the module content + script =3D self.make_script("print(0)") + + # Recompilation without -o 1 + self.compile_dir(optimize=3D[0, 2], force=3DTrue) + + # opt-1.pyc should have the same inode as before and others shou= ld not + self.assertEqual(inode, os.stat(pycs[1]).st_ino) + self.assertTrue(is_hardlink(pycs[0], pycs[2])) + self.assertNotEqual(inode, os.stat(pycs[2]).st_ino) + # opt-1.pyc and opt-2.pyc have different content + self.assertFalse(filecmp.cmp(pycs[1], pycs[2], shallow=3DTrue)) + + def test_import(self): + # Test that import updates a single pyc file when pyc files already + # exists and the script content changed + with self.temporary_directory(): + script =3D self.make_script(self.create_code(), name=3D"module") + self.compile_dir() + # All three levels have the same inode + self.check_hardlinks(script) + + pycs =3D get_pycs(script) + inode =3D os.stat(pycs[0]).st_ino + + # Change of the module content + script =3D self.make_script("print(0)", name=3D"module") + + # Import the module in Python with -O (optimization level 1) + script_helper.assert_python_ok( + "-O", "-c", "import module", __isolated=3DFalse, PYTHONPATH= =3Dself.path + ) + + # Only opt-1.pyc is changed + self.assertEqual(inode, os.stat(pycs[0]).st_ino) + self.assertEqual(inode, os.stat(pycs[2]).st_ino) + 
self.assertFalse(is_hardlink(pycs[1], pycs[2])) + # opt-1.pyc and opt-2.pyc have different content + self.assertFalse(filecmp.cmp(pycs[1], pycs[2], shallow=3DTrue)) + + +class HardlinkDedupTestsWithSourceEpoch(HardlinkDedupTestsBase, + unittest.TestCase, + metaclass=3DSourceDateEpochTestMeta, + source_date_epoch=3DTrue): + pass + + +class HardlinkDedupTestsNoSourceEpoch(HardlinkDedupTestsBase, + unittest.TestCase, + metaclass=3DSourceDateEpochTestMeta, + source_date_epoch=3DFalse): + pass + + if __name__ =3D=3D "__main__": unittest.main() diff --git a/Misc/ACKS b/Misc/ACKS index f744de6b1f66d..b479aa5d807f5 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -86,6 +86,7 @@ Marcin Bachry Alfonso Baciero Dwayne Bailey Stig Bakken +Lum=C3=ADr Balhar Aleksandr Balezin Greg Ball Lewis Ball diff --git a/Misc/NEWS.d/next/Library/2020-05-04-11-20-49.bpo-40495.TyTc2O.rs= t b/Misc/NEWS.d/next/Library/2020-05-04-11-20-49.bpo-40495.TyTc2O.rst new file mode 100644 index 0000000000000..d3049b05a78b6 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-04-11-20-49.bpo-40495.TyTc2O.rst @@ -0,0 +1,2 @@ +:mod:`compileall` is now able to use hardlinks to prevent duplicates in a +case when ``.pyc`` files for different optimization levels have the same con= tent. 
From webhook-mailer at python.org Thu May 14 18:11:48 2020 From: webhook-mailer at python.org (Filipe =?utf-8?q?La=C3=ADns?=) Date: Thu, 14 May 2020 22:11:48 -0000 Subject: [Python-checkins] (no subject) Message-ID: To: python-checkins at python.org Subject: bpo-40548: GitHub Action workflow: skip jobs on doc only PRs (GH-19983) Content-Type: text/plain; charset="utf-8" Content-Transfer-Encoding: quoted-printable MIME-Version: 1.0 https://github.com/python/cpython/commit/75d7257b201a56f950c20cd9f5753a83fff4= 742b commit: 75d7257b201a56f950c20cd9f5753a83fff4742b branch: master author: Filipe La=C3=ADns committer: GitHub date: 2020-05-15T00:11:40+02:00 summary: bpo-40548: GitHub Action workflow: skip jobs on doc only PRs (GH-19983) Signed-off-by: Filipe La=C3=ADns files: M .github/workflows/build.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 6e6a6d2b789d3..dabfb79e9dcea 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -16,9 +16,27 @@ on: - 3.7 =20 jobs: + check_source: + name: 'Check for source changes' + runs-on: ubuntu-latest + outputs: + run_tests: ${{ steps.check.outputs.run_tests }} + steps: + - uses: actions/checkout at v2 + - name: Check for source changes + id: check + run: | + if [ -z "GITHUB_BASE_REF" ]; then + echo '::set-output name=3Drun_tests::true' + else + git fetch origin $GITHUB_BASE_REF --depth=3D1 + git diff --name-only origin/$GITHUB_BASE_REF... 
| grep -qvE '(\.= rst$|^Doc|^Misc)' && echo '::set-output name=3Drun_tests::true' + fi build_win32: name: 'Windows (x86)' runs-on: windows-latest + needs: check_source + if: needs.check_source.outputs.run_tests =3D=3D 'true' steps: - uses: actions/checkout at v1 - name: Build CPython @@ -31,6 +49,8 @@ jobs: build_win_amd64: name: 'Windows (x64)' runs-on: windows-latest + needs: check_source + if: needs.check_source.outputs.run_tests =3D=3D 'true' steps: - uses: actions/checkout at v1 - name: Build CPython @@ -43,6 +63,8 @@ jobs: build_macos: name: 'macOS' runs-on: macos-latest + needs: check_source + if: needs.check_source.outputs.run_tests =3D=3D 'true' steps: - uses: actions/checkout at v1 - name: Configure CPython @@ -57,6 +79,8 @@ jobs: build_ubuntu: name: 'Ubuntu' runs-on: ubuntu-latest + needs: check_source + if: needs.check_source.outputs.run_tests =3D=3D 'true' env: OPENSSL_VER: 1.1.1f steps: From webhook-mailer at python.org Thu May 14 22:08:30 2020 From: webhook-mailer at python.org (Filipe =?utf-8?q?La=C3=ADns?=) Date: Fri, 15 May 2020 02:08:30 -0000 Subject: [Python-checkins] (no subject) Message-ID: To: python-checkins at python.org Subject: bpo-40548: github actions: pass the changes check on no source changes (GH-20097) Content-Type: text/plain; charset="utf-8" Content-Transfer-Encoding: quoted-printable MIME-Version: 1.0 https://github.com/python/cpython/commit/6a78589b6b22878491a4b042bb8b3161e1d1= 20f6 commit: 6a78589b6b22878491a4b042bb8b3161e1d120f6 branch: master author: Filipe La=C3=ADns committer: GitHub date: 2020-05-15T04:08:21+02:00 summary: bpo-40548: github actions: pass the changes check on no source changes (GH-20= 097) Signed-off-by: Filipe La=C3=ADns files: M .github/workflows/build.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index dabfb79e9dcea..dbef550643e81 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -30,7 +30,7 @@ jobs: echo '::set-output name=3Drun_tests::true' else 
git fetch origin $GITHUB_BASE_REF --depth=3D1 - git diff --name-only origin/$GITHUB_BASE_REF... | grep -qvE '(\.= rst$|^Doc|^Misc)' && echo '::set-output name=3Drun_tests::true' + git diff --name-only origin/$GITHUB_BASE_REF... | grep -qvE '(\.= rst$|^Doc|^Misc)' && echo '::set-output name=3Drun_tests::true' || true fi build_win32: name: 'Windows (x86)' From webhook-mailer at python.org Mon May 18 08:52:53 2020 From: webhook-mailer at python.org (Filipe =?utf-8?q?La=C3=ADns?=) Date: Mon, 18 May 2020 12:52:53 -0000 Subject: [Python-checkins] (no subject) Message-ID: To: python-checkins at python.org Subject: bpo-40548: Github Actions: update actions/checkout to v2 (GH-20164) Content-Type: text/plain; charset="utf-8" Content-Transfer-Encoding: quoted-printable MIME-Version: 1.0 https://github.com/python/cpython/commit/c444108dd62672f2b41539bcc8f15da44501= f405 commit: c444108dd62672f2b41539bcc8f15da44501f405 branch: master author: Filipe La=C3=ADns committer: GitHub date: 2020-05-18T14:52:45+02:00 summary: bpo-40548: Github Actions: update actions/checkout to v2 (GH-20164) Signed-off-by: Filipe La=C3=ADns files: M .github/workflows/build.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index dbef550643e81..7c3bca3fc0671 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -38,7 +38,7 @@ jobs: needs: check_source if: needs.check_source.outputs.run_tests =3D=3D 'true' steps: - - uses: actions/checkout at v1 + - uses: actions/checkout at v2 - name: Build CPython run: .\PCbuild\build.bat -e -p Win32 - name: Display build info @@ -52,7 +52,7 @@ jobs: needs: check_source if: needs.check_source.outputs.run_tests =3D=3D 'true' steps: - - uses: actions/checkout at v1 + - uses: actions/checkout at v2 - name: Build CPython run: .\PCbuild\build.bat -e -p x64 - name: Display build info @@ -66,7 +66,7 @@ jobs: needs: check_source if: needs.check_source.outputs.run_tests =3D=3D 'true' steps: - - uses: actions/checkout at v1 + - 
uses: actions/checkout at v2 - name: Configure CPython run: ./configure --with-pydebug --with-openssl=3D/usr/local/opt/openss= l --prefix=3D/opt/python-dev - name: Build CPython @@ -84,7 +84,7 @@ jobs: env: OPENSSL_VER: 1.1.1f steps: - - uses: actions/checkout at v1 + - uses: actions/checkout at v2 - name: Install Dependencies run: sudo ./.github/workflows/posix-deps-apt.sh - name: 'Restore OpenSSL build' From webhook-mailer at python.org Mon May 18 09:23:45 2020 From: webhook-mailer at python.org (=?utf-8?q?Lum=C3=ADr?= 'Frenzy' Balhar) Date: Mon, 18 May 2020 13:23:45 -0000 Subject: [Python-checkins] (no subject) Message-ID: To: python-checkins at python.org Subject: bpo-38112: Document that compileall.compile_[dir,file] also accept multiple opt levels (GH-20174) Content-Type: text/plain; charset="utf-8" Content-Transfer-Encoding: quoted-printable MIME-Version: 1.0 https://github.com/python/cpython/commit/adc72bb2f9a5d8b548ee04405e19a184e569= 9e8d commit: adc72bb2f9a5d8b548ee04405e19a184e5699e8d branch: master author: Lum=C3=ADr 'Frenzy' Balhar committer: GitHub date: 2020-05-18T15:23:37+02:00 summary: bpo-38112: Document that compileall.compile_[dir,file] also accept multiple o= pt levels (GH-20174) files: M Doc/library/compileall.rst diff --git a/Doc/library/compileall.rst b/Doc/library/compileall.rst index 01ab7461e9b1c..9b914b1f0d9c6 100644 --- a/Doc/library/compileall.rst +++ b/Doc/library/compileall.rst @@ -181,7 +181,8 @@ Public functions coexist. =20 *optimize* specifies the optimization level for the compiler. It is pass= ed to - the built-in :func:`compile` function. + the built-in :func:`compile` function. Accepts also a sequence of optimiz= ation + levels which lead to multiple compilations of one :file:`.py` file in one= call. =20 The argument *workers* specifies how many workers are used to compile files in parallel. The default is to not use multiple workers. @@ -256,7 +257,8 @@ Public functions coexist. 
=20 *optimize* specifies the optimization level for the compiler. It is pass= ed to - the built-in :func:`compile` function. + the built-in :func:`compile` function. Accepts also a sequence of optimiz= ation + levels which lead to multiple compilations of one :file:`.py` file in one= call. =20 *invalidation_mode* should be a member of the :class:`py_compile.PycInvalidationMode` enum and controls how the generat= ed From webhook-mailer at python.org Tue May 19 07:38:51 2020 From: webhook-mailer at python.org (=?utf-8?q?=C5=81ukasz?= Langa) Date: Tue, 19 May 2020 11:38:51 -0000 Subject: [Python-checkins] (no subject) Message-ID: To: python-checkins at python.org Subject: Consolidate 3.9.0b1 NEWS in the master branch Content-Type: text/plain; charset="utf-8" Content-Transfer-Encoding: quoted-printable MIME-Version: 1.0 https://github.com/python/cpython/commit/18cb3be41b9ba1d1727fbbe1b51da6f3e5fe= e538 commit: 18cb3be41b9ba1d1727fbbe1b51da6f3e5fee538 branch: master author: =C5=81ukasz Langa committer: =C5=81ukasz Langa date: 2020-05-19T13:33:08+02:00 summary: Consolidate 3.9.0b1 NEWS in the master branch files: A Misc/NEWS.d/3.9.0b1.rst D Misc/NEWS.d/next/Build/2020-05-05-15-39-11.bpo-40514.bZZmuS.rst D Misc/NEWS.d/next/Build/2020-05-17-03-33-00.bpo-40653.WI8UGn.rst D Misc/NEWS.d/next/C API/2020-01-22-12-38-59.bpo-38787.HUH6hd.rst D Misc/NEWS.d/next/C API/2020-04-27-14-00-38.bpo-40217.sgn6c8.rst D Misc/NEWS.d/next/C API/2020-04-28-15-47-58.bpo-40421.ZIzOV0.rst D Misc/NEWS.d/next/C API/2020-04-28-19-29-36.bpo-40421.3uIIaB.rst D Misc/NEWS.d/next/C API/2020-04-28-23-17-27.bpo-40428.rmtpru.rst D Misc/NEWS.d/next/C API/2020-04-29-01-39-41.bpo-40429.VQfvta.rst D Misc/NEWS.d/next/C API/2020-05-01-17-28-04.bpo-40412.dE0D8N.rst D Misc/NEWS.d/next/C API/2020-05-07-11-41-13.bpo-40545.51DzF1.rst D Misc/NEWS.d/next/C API/2020-05-10-16-39-08.bpo-38787.XzQ59O.rst D Misc/NEWS.d/next/C API/2020-05-14-00-36-19.bpo-39465.3a5g-X.rst D Misc/NEWS.d/next/Core and 
Builtins/2019-11-20-09-50-58.bpo-37986.o0lmA7.rst D Misc/NEWS.d/next/Core and Builtins/2019-11-22-14-34-47.bpo-38880.evcCPa.rst D Misc/NEWS.d/next/Core and Builtins/2020-04-08-17-02-35.bpo-40228.bRaaJ-.rst D Misc/NEWS.d/next/Core and Builtins/2020-04-19-22-23-32.bpo-40328.gWJ53f.rst D Misc/NEWS.d/next/Core and Builtins/2020-04-30-00-50-25.bpo-29587.oEwSq.rst D Misc/NEWS.d/next/Core and Builtins/2020-04-30-01-44-42.bpo-1635741.GKtjqr.r= st D Misc/NEWS.d/next/Core and Builtins/2020-05-01-15-36-14.bpo-40408.XzQI59.rst D Misc/NEWS.d/next/Core and Builtins/2020-05-01-19-04-52.bpo-40417.Sti2lJ.rst D Misc/NEWS.d/next/Core and Builtins/2020-05-03-23-28-11.bpo-40246.c1D7x8.rst D Misc/NEWS.d/next/Core and Builtins/2020-05-05-03-36-27.bpo-1635741.ARv1YV.r= st D Misc/NEWS.d/next/Core and Builtins/2020-05-05-20-36-15.bpo-40523.hKZVTB.rst D Misc/NEWS.d/next/Core and Builtins/2020-05-05-21-11-35.bpo-1635741.ggwD3C.r= st D Misc/NEWS.d/next/Core and Builtins/2020-05-06-14-52-35.bpo-40527.gTNKuy.rst D Misc/NEWS.d/next/Core and Builtins/2020-05-08-03-25-26.bpo-40502.e-VCyL.rst D Misc/NEWS.d/next/Core and Builtins/2020-05-09-01-39-16.bpo-40566.wlcjW_.rst D Misc/NEWS.d/next/Core and Builtins/2020-05-11-00-19-42.bpo-40585.yusknY.rst D Misc/NEWS.d/next/Core and Builtins/2020-05-11-13-50-52.bpo-40593.yuOXj3.rst D Misc/NEWS.d/next/Core and Builtins/2020-05-11-20-53-52.bpo-40596.dwOH_X.rst D Misc/NEWS.d/next/Core and Builtins/2020-05-17-20-38-12.bpo-40663.u2aiZf.rst D Misc/NEWS.d/next/Documentation/2020-01-24-05-42-57.bpo-39435.EFcdFU.rst D Misc/NEWS.d/next/Documentation/2020-03-14-18-37-06.bpo-39705.nQVqig.rst D Misc/NEWS.d/next/Documentation/2020-05-04-14-20-02.bpo-40499.tjLSo8.rst D Misc/NEWS.d/next/Documentation/2020-05-08-08-39-40.bpo-40561.ZMB_2i.rst D Misc/NEWS.d/next/Documentation/2020-05-08-20-18-55.bpo-34790.t6kW_1.rst D Misc/NEWS.d/next/Library/2019-09-01-15-17-49.bpo-24416.G8Ww1U.rst D Misc/NEWS.d/next/Library/2019-12-15-19-17-10.bpo-39058.7ci-vd.rst D 
Misc/NEWS.d/next/Library/2020-03-21-05-26-38.bpo-40025.DTLtyq.rst D Misc/NEWS.d/next/Library/2020-04-05-04-16-14.bpo-40192.nk8uRJ.rst D Misc/NEWS.d/next/Library/2020-04-07-23-44-06.bpo-39075.hgck3j.rst D Misc/NEWS.d/next/Library/2020-04-14-09-54-35.bpo-40273.IN73Ks.rst D Misc/NEWS.d/next/Library/2020-04-14-22-31-27.bpo-40291._O8hXn.rst D Misc/NEWS.d/next/Library/2020-04-25-20-00-58.bpo-40389.FPA6f0.rst D Misc/NEWS.d/next/Library/2020-04-25-23-14-11.bpo-40375.5GuK2A.rst D Misc/NEWS.d/next/Library/2020-04-27-00-51-40.bpo-39791.wv8Dxn.rst D Misc/NEWS.d/next/Library/2020-04-27-14-48-43.bpo-39966.N5yXUe.rst D Misc/NEWS.d/next/Library/2020-04-27-17-19-09.bpo-30966._5lDx-.rst D Misc/NEWS.d/next/Library/2020-04-27-20-27-39.bpo-30966.Xmtlqu.rst D Misc/NEWS.d/next/Library/2020-04-28-18-25-27.bpo-39995.WmA3Gk.rst D Misc/NEWS.d/next/Library/2020-04-28-18-59-48.bpo-40394.Yi5uuM.rst D Misc/NEWS.d/next/Library/2020-04-29-18-02-16.bpo-40286.txbQNx.rst D Misc/NEWS.d/next/Library/2020-04-30-22-04-58.bpo-40453.ggz7sl.rst D Misc/NEWS.d/next/Library/2020-04-30-22-25-08.bpo-32494.1xaU5l.rst D Misc/NEWS.d/next/Library/2020-05-01-00-22-58.bpo-39305.Cuwu_H.rst D Misc/NEWS.d/next/Library/2020-05-01-23-24-25.bpo-39435.mgb6ib.rst D Misc/NEWS.d/next/Library/2020-05-02-04-29-31.bpo-40459.fSAYVD.rst D Misc/NEWS.d/next/Library/2020-05-02-12-00-28.bpo-40465.qfCjOD.rst D Misc/NEWS.d/next/Library/2020-05-02-14-24-48.bpo-40355.xTujaB.rst D Misc/NEWS.d/next/Library/2020-05-02-17-17-37.bpo-40457.EXReI1.rst D Misc/NEWS.d/next/Library/2020-05-04-11-20-49.bpo-40495.TyTc2O.rst D Misc/NEWS.d/next/Library/2020-05-04-21-21-43.bpo-40480.mjldWa.rst D Misc/NEWS.d/next/Library/2020-05-05-08-12-51.bpo-40559.112wwa.rst D Misc/NEWS.d/next/Library/2020-05-05-17-12-47.bpo-40504.EX6wPn.rst D Misc/NEWS.d/next/Library/2020-05-06-02-33-00.bpo-31033.aX12pw.rst D Misc/NEWS.d/next/Library/2020-05-06-13-51-19.bpo-40515.TUCvYB.rst D Misc/NEWS.d/next/Library/2020-05-06-15-36-47.bpo-40541.LlYghL.rst D 
Misc/NEWS.d/next/Library/2020-05-07-06-41-20.bpo-31033.waCj3n.rst D Misc/NEWS.d/next/Library/2020-05-07-20-11-51.bpo-40549.6FiRSV.rst D Misc/NEWS.d/next/Library/2020-05-07-21-22-04.bpo-40397.PVWFAn.rst D Misc/NEWS.d/next/Library/2020-05-08-15-48-39.bpo-40503.elZyxc.rst D Misc/NEWS.d/next/Library/2020-05-09-15-38-25.bpo-40571.kOXZGC.rst D Misc/NEWS.d/next/Library/2020-05-11-19-17-23.bpo-40597.4SGfgm.rst D Misc/NEWS.d/next/Library/2020-05-13-10-23-29.bpo-40612.gOIreM.rst D Misc/NEWS.d/next/Library/2020-05-13-15-32-13.bpo-40607.uSPFCi.rst D Misc/NEWS.d/next/Library/2020-05-13-23-10-25.bpo-40257.aR4TGp.rst D Misc/NEWS.d/next/Library/2020-05-15-17-38-21.bpo-40479.yamSCh.rst D Misc/NEWS.d/next/Library/2020-05-15-19-53-18.bpo-37630.O5kgAw.rst D Misc/NEWS.d/next/Library/2020-05-15-21-57-10.bpo-40637.lb3Bnp.rst D Misc/NEWS.d/next/Library/2020-05-16-17-05-02.bpo-40645.wYSkjT.rst D Misc/NEWS.d/next/Library/2020-05-16-19-34-38.bpo-40645.7ibMt-.rst D Misc/NEWS.d/next/Library/2020-05-17-14-00-12.bpo-40536.FCpoRA.rst D Misc/NEWS.d/next/Library/2020-05-17-21-56-38.bpo-40665.msB7u5.rst D Misc/NEWS.d/next/Library/2020-05-18-12-56-45.bpo-40662.dfornR.rst D Misc/NEWS.d/next/Security/2020-05-06-00-41-11.bpo-40501._61wv_.rst D Misc/NEWS.d/next/Tests/2020-04-29-16-08-24.bpo-40436.gDMnYl.rst D Misc/NEWS.d/next/Tests/2020-05-15-01-21-44.bpo-40055.Xp4aP9.rst D Misc/NEWS.d/next/Tools-Demos/2020-04-03-08-32-31.bpo-40163.lX8K4B.rst D Misc/NEWS.d/next/Tools-Demos/2020-04-29-01-32-17.bpo-40431.B_aEZ0.rst D Misc/NEWS.d/next/Tools-Demos/2020-05-15-17-48-25.bpo-40479.B1gBl-.rst D Misc/NEWS.d/next/Windows/2020-03-23-19-07-55.bpo-39148.W1YJEb.rst D Misc/NEWS.d/next/Windows/2020-05-01-20-57-57.bpo-40458.Eb0ueI.rst D Misc/NEWS.d/next/Windows/2020-05-17-00-08-13.bpo-40650.4euMtU.rst D Misc/NEWS.d/next/macOS/2020-04-15-00-02-47.bpo-35569.02_1MV.rst D Misc/NEWS.d/next/macOS/2020-05-18-02-43-11.bpo-34956.35IcGF.rst diff --git a/Misc/NEWS.d/3.9.0b1.rst b/Misc/NEWS.d/3.9.0b1.rst new file mode 100644 index 
0000000000000..25dd405c13039 --- /dev/null +++ b/Misc/NEWS.d/3.9.0b1.rst @@ -0,0 +1,960 @@ +.. bpo: 40501 +.. date: 2020-05-06-00-41-11 +.. nonce: _61wv_ +.. release date: 2020-05-19 +.. section: Security + +:mod:`uuid` no longer uses :mod:`ctypes` to load :file:`libuuid` or +:file:`rpcrt4.dll` at runtime. + +.. + +.. bpo: 40663 +.. date: 2020-05-17-20-38-12 +.. nonce: u2aiZf +.. section: Core and Builtins + +Correctly generate annotations where parentheses are omitted but required +(e.g: ``Type[(str, int, *other))]``. + +.. + +.. bpo: 40596 +.. date: 2020-05-11-20-53-52 +.. nonce: dwOH_X +.. section: Core and Builtins + +Fixed :meth:`str.isidentifier` for non-canonicalized strings containing +non-BMP characters on Windows. + +.. + +.. bpo: 40593 +.. date: 2020-05-11-13-50-52 +.. nonce: yuOXj3 +.. section: Core and Builtins + +Improved syntax errors for invalid characters in source code. + +.. + +.. bpo: 40585 +.. date: 2020-05-11-00-19-42 +.. nonce: yusknY +.. section: Core and Builtins + +Fixed a bug when using :func:`codeop.compile_command` that was causing +exceptions to be swallowed with the new parser. Patch by Pablo Galindo + +.. + +.. bpo: 40566 +.. date: 2020-05-09-01-39-16 +.. nonce: wlcjW_ +.. section: Core and Builtins + +Apply :pep:`573` to :mod:`abc`. + +.. + +.. bpo: 40502 +.. date: 2020-05-08-03-25-26 +.. nonce: e-VCyL +.. section: Core and Builtins + +Initialize ``n->n_col_offset``. (Patch by Joannah Nanjekye) + +.. + +.. bpo: 40527 +.. date: 2020-05-06-14-52-35 +.. nonce: gTNKuy +.. section: Core and Builtins + +Fix command line argument parsing: no longer write errors multiple times +into stderr. + +.. + +.. bpo: 1635741 +.. date: 2020-05-05-21-11-35 +.. nonce: ggwD3C +.. section: Core and Builtins + +Port :mod:`errno` to multiphase initialization (:pep:`489`). + +.. + +.. bpo: 40523 +.. date: 2020-05-05-20-36-15 +.. nonce: hKZVTB +.. 
section: Core and Builtins + +Add pass-throughs for :func:`hash` and :func:`reversed` to +:class:`weakref.proxy` objects. Patch by Pablo Galindo. + +.. + +.. bpo: 1635741 +.. date: 2020-05-05-03-36-27 +.. nonce: ARv1YV +.. section: Core and Builtins + +Port :mod:`syslog` to multiphase initialization (:pep:`489`). + +.. + +.. bpo: 40246 +.. date: 2020-05-03-23-28-11 +.. nonce: c1D7x8 +.. section: Core and Builtins + +Reporting a specialised error message for invalid string prefixes, which was +introduced in :issue:`40246`, is being reverted due to backwards +compatibility concerns for strings that immediately follow a reserved +keyword without whitespace between them. Constructs like `bg=3D"#d00" if cle= ar +else"#fca"` were failing to parse, which is not an acceptable breakage on +such short notice. + +.. + +.. bpo: 40417 +.. date: 2020-05-01-19-04-52 +.. nonce: Sti2lJ +.. section: Core and Builtins + +Fix imp module deprecation warning when PyImport_ReloadModule is called. +Patch by Robert Rouhani. + +.. + +.. bpo: 40408 +.. date: 2020-05-01-15-36-14 +.. nonce: XzQI59 +.. section: Core and Builtins + +Fixed support of nested type variables in GenericAlias (e.g. +``list[list[T]]``). + +.. + +.. bpo: 1635741 +.. date: 2020-04-30-01-44-42 +.. nonce: GKtjqr +.. section: Core and Builtins + +Port _stat module to multiphase initialization (:pep:`489`). + +.. + +.. bpo: 29587 +.. date: 2020-04-30-00-50-25 +.. nonce: oEwSq +.. section: Core and Builtins + +Enable implicit exception chaining when calling :meth:`generator.throw`. + +.. + +.. bpo: 40328 +.. date: 2020-04-19-22-23-32 +.. nonce: gWJ53f +.. section: Core and Builtins + +Add tools for generating mappings headers for CJKCodecs. + +.. + +.. bpo: 40228 +.. date: 2020-04-08-17-02-35 +.. nonce: bRaaJ- +.. section: Core and Builtins + +Setting frame.f_lineno is now robust w.r.t. changes in the +source-to-bytecode compiler + +.. + +.. bpo: 38880 +.. date: 2019-11-22-14-34-47 +.. nonce: evcCPa +.. 
section: Core and Builtins + +Added the ability to list interpreters associated with channel ends in the +internal subinterpreters module. + +.. + +.. bpo: 37986 +.. date: 2019-11-20-09-50-58 +.. nonce: o0lmA7 +.. section: Core and Builtins + +Improve performance of :c:func:`PyLong_FromDouble` for values that fit into +:c:type:`long`. + +.. + +.. bpo: 40662 +.. date: 2020-05-18-12-56-45 +.. nonce: dfornR +.. section: Library + +Fixed :func:`ast.get_source_segment` for ast nodes that have incomplete +location information. Patch by Irit Katriel. + +.. + +.. bpo: 40665 +.. date: 2020-05-17-21-56-38 +.. nonce: msB7u5 +.. section: Library + +Convert :mod:`bisect` to use Argument Clinic. + +.. + +.. bpo: 40536 +.. date: 2020-05-17-14-00-12 +.. nonce: FCpoRA +.. section: Library + +Added the :func:`~zoneinfo.available_timezones` function to the +:mod:`zoneinfo` module. Patch by Paul Ganssle. + +.. + +.. bpo: 40645 +.. date: 2020-05-16-19-34-38 +.. nonce: 7ibMt- +.. section: Library + +The :class:`hmac.HMAC` exposes internal implementation details. The +attributes ``digest_cons``, ``inner``, and ``outer`` are deprecated and will +be removed in the future. + +.. + +.. bpo: 40645 +.. date: 2020-05-16-17-05-02 +.. nonce: wYSkjT +.. section: Library + +The internal module ``_hashlib`` wraps and exposes OpenSSL's HMAC API. The +new code will be used in Python 3.10 after the internal implementation +details of the pure Python HMAC module are no longer part of the public API. + +.. + +.. bpo: 40637 +.. date: 2020-05-15-21-57-10 +.. nonce: lb3Bnp +.. section: Library + +Builtin hash modules can now be disabled or selectively enabled with +``configure --with-builtin-hashlib-hashes=3Dsha3,blake2`` or +``--without-builtin-hashlib-hashes``. + +.. + +.. bpo: 37630 +.. date: 2020-05-15-19-53-18 +.. nonce: O5kgAw +.. section: Library + +The :mod:`hashlib` module can now use SHA3 hashes and SHAKE XOF from OpenSSL +when available. + +.. + +.. bpo: 40479 +.. date: 2020-05-15-17-38-21 +.. 
nonce: yamSCh +.. section: Library + +The :mod:`hashlib` now compiles with OpenSSL 3.0.0-alpha2. + +.. + +.. bpo: 40257 +.. date: 2020-05-13-23-10-25 +.. nonce: aR4TGp +.. section: Library + +Revert changes to :func:`inspect.getdoc`. + +.. + +.. bpo: 40607 +.. date: 2020-05-13-15-32-13 +.. nonce: uSPFCi +.. section: Library + +When cancelling a task due to timeout, :meth:`asyncio.wait_for` will now +propagate the exception if an error happens during cancellation. Patch by +Roman Skurikhin. + +.. + +.. bpo: 40612 +.. date: 2020-05-13-10-23-29 +.. nonce: gOIreM +.. section: Library + +Fix edge cases in SyntaxError formatting. If the offset is <=3D 0, no caret = is +printed. If the offset is > line length, the caret is printed pointing just +after the last character. + +.. + +.. bpo: 40597 +.. date: 2020-05-11-19-17-23 +.. nonce: 4SGfgm +.. section: Library + +If text content lines are longer than policy.max_line_length, always use a +content-encoding to make sure they are wrapped. + +.. + +.. bpo: 40571 +.. date: 2020-05-09-15-38-25 +.. nonce: kOXZGC +.. section: Library + +Added functools.cache() as a simpler, more discoverable way to access the +unbounded cache variant of lru_cache(maxsize=3DNone). + +.. + +.. bpo: 40503 +.. date: 2020-05-08-15-48-39 +.. nonce: elZyxc +.. section: Library + +:pep:`615`, the :mod:`zoneinfo` module. Adds support for the IANA time zone +database. + +.. + +.. bpo: 40397 +.. date: 2020-05-07-21-22-04 +.. nonce: PVWFAn +.. section: Library + +Removed attributes ``__args__`` and ``__parameters__`` from special generic +aliases like ``typing.List`` (not subscripted). + +.. + +.. bpo: 40549 +.. date: 2020-05-07-20-11-51 +.. nonce: 6FiRSV +.. section: Library + +Convert posixmodule.c ("posix" or "nt" module) to the multiphase +initialization (PEP 489). + +.. + +.. bpo: 31033 +.. date: 2020-05-07-06-41-20 +.. nonce: waCj3n +.. section: Library + +Add a ``msg`` argument to :meth:`Future.cancel` and :meth:`Task.cancel`. + +.. + +.. 
bpo: 40541 +.. date: 2020-05-06-15-36-47 +.. nonce: LlYghL +.. section: Library + +Added an optional *counts* parameter to random.sample(). + +.. + +.. bpo: 40515 +.. date: 2020-05-06-13-51-19 +.. nonce: TUCvYB +.. section: Library + +The :mod:`ssl` and :mod:`hashlib` modules now actively check that OpenSSL is +build with thread support. Python 3.7.0 made thread support mandatory and no +longer works safely with a no-thread builds. + +.. + +.. bpo: 31033 +.. date: 2020-05-06-02-33-00 +.. nonce: aX12pw +.. section: Library + +When a :class:`asyncio.Task` is cancelled, the exception traceback now +chains all the way back to where the task was first interrupted. + +.. + +.. bpo: 40504 +.. date: 2020-05-05-17-12-47 +.. nonce: EX6wPn +.. section: Library + +:func:`functools.lru_cache` objects can now be the targets of weakrefs. + +.. + +.. bpo: 40559 +.. date: 2020-05-05-08-12-51 +.. nonce: 112wwa +.. section: Library + +Fix possible memory leak in the C implementation of :class:`asyncio.Task`. + +.. + +.. bpo: 40480 +.. date: 2020-05-04-21-21-43 +.. nonce: mjldWa +.. section: Library + +``fnmatch.fnmatch()`` could take exponential time in the presence of +multiple ``*`` pattern characters. This was repaired by generating more +elaborate regular expressions to avoid futile backtracking. + +.. + +.. bpo: 40495 +.. date: 2020-05-04-11-20-49 +.. nonce: TyTc2O +.. section: Library + +:mod:`compileall` is now able to use hardlinks to prevent duplicates in a +case when ``.pyc`` files for different optimization levels have the same +content. + +.. + +.. bpo: 40457 +.. date: 2020-05-02-17-17-37 +.. nonce: EXReI1 +.. section: Library + +The ssl module now support OpenSSL builds without TLS 1.0 and 1.1 methods. + +.. + +.. bpo: 40355 +.. date: 2020-05-02-14-24-48 +.. nonce: xTujaB +.. section: Library + +Improve error reporting in :func:`ast.literal_eval` in the presence of +malformed :class:`ast.Dict` nodes instead of silently ignoring any +non-conforming elements. 
Patch by Curtis Bucher. + +.. + +.. bpo: 40465 +.. date: 2020-05-02-12-00-28 +.. nonce: qfCjOD +.. section: Library + +Deprecated the optional *random* argument to *random.shuffle()*. + +.. + +.. bpo: 40459 +.. date: 2020-05-02-04-29-31 +.. nonce: fSAYVD +.. section: Library + +:func:`platform.win32_ver` now produces correct *ptype* strings instead of +empty strings. + +.. + +.. bpo: 39435 +.. date: 2020-05-01-23-24-25 +.. nonce: mgb6ib +.. section: Library + +The first argument of :func:`pickle.loads` is now positional-only. + +.. + +.. bpo: 39305 +.. date: 2020-05-01-00-22-58 +.. nonce: Cuwu_H +.. section: Library + +Update :mod:`nntplib` to merge :class:`nntplib.NNTP` and +:class:`nntplib._NNTPBase`. Patch by Dong-hee Na. + +.. + +.. bpo: 32494 +.. date: 2020-04-30-22-25-08 +.. nonce: 1xaU5l +.. section: Library + +Update :mod:`dbm.gnu` to use gdbm_count if possible when calling +:func:`len`. Patch by Dong-hee Na. + +.. + +.. bpo: 40453 +.. date: 2020-04-30-22-04-58 +.. nonce: ggz7sl +.. section: Library + +Add ``isolated=3DTrue`` keyword-only parameter to +``_xxsubinterpreters.create()``. An isolated subinterpreter cannot spawn +threads, spawn a child process or call ``os.fork()``. + +.. + +.. bpo: 40286 +.. date: 2020-04-29-18-02-16 +.. nonce: txbQNx +.. section: Library + +Remove ``_random.Random.randbytes()``: the C implementation of +``randbytes()``. Implement the method in Python to ease subclassing: +``randbytes()`` now directly reuses ``getrandbits()``. + +.. + +.. bpo: 40394 +.. date: 2020-04-28-18-59-48 +.. nonce: Yi5uuM +.. section: Library + +Added default arguments to +:meth:`difflib.SequenceMatcher.find_longest_match()`. + +.. + +.. bpo: 39995 +.. date: 2020-04-28-18-25-27 +.. nonce: WmA3Gk +.. section: Library + +Fix a race condition in concurrent.futures._ThreadWakeup: access to +_ThreadWakeup is now protected with the shutdown lock. + +.. + +.. bpo: 30966 +.. date: 2020-04-27-20-27-39 +.. nonce: Xmtlqu +.. 
section: Library + +``Process.shutdown(wait=3DTrue)`` of :mod:`concurrent.futures` now closes +explicitly the result queue. + +.. + +.. bpo: 30966 +.. date: 2020-04-27-17-19-09 +.. nonce: _5lDx- +.. section: Library + +Add a new :meth:`~multiprocessing.SimpleQueue.close` method to the +:class:`~multiprocessing.SimpleQueue` class to explicitly close the queue. + +.. + +.. bpo: 39966 +.. date: 2020-04-27-14-48-43 +.. nonce: N5yXUe +.. section: Library + +Revert bpo-25597. :class:`unittest.mock.MagicMock` with wraps' set uses +default return values for magic methods. + +.. + +.. bpo: 39791 +.. date: 2020-04-27-00-51-40 +.. nonce: wv8Dxn +.. section: Library + +Added ``files()`` function to importlib.resources with support for +subdirectories in package data, matching backport in importlib_resources +1.5. + +.. + +.. bpo: 40375 +.. date: 2020-04-25-23-14-11 +.. nonce: 5GuK2A +.. section: Library + +:meth:`imaplib.IMAP4.unselect` is added. Patch by Dong-hee Na. + +.. + +.. bpo: 40389 +.. date: 2020-04-25-20-00-58 +.. nonce: FPA6f0 +.. section: Library + +``repr()`` now returns ``typing.Optional[T]`` when called for +``typing.Union`` of two types, one of which is ``NoneType``. + +.. + +.. bpo: 40291 +.. date: 2020-04-14-22-31-27 +.. nonce: _O8hXn +.. section: Library + +Add support for CAN_J1939 sockets (available on Linux 5.4+) + +.. + +.. bpo: 40273 +.. date: 2020-04-14-09-54-35 +.. nonce: IN73Ks +.. section: Library + +:class:`types.MappingProxyType` is now reversible. + +.. + +.. bpo: 39075 +.. date: 2020-04-07-23-44-06 +.. nonce: hgck3j +.. section: Library + +The repr for :class:`types.SimpleNamespace` is now insertion ordered rather +than alphabetical. + +.. + +.. bpo: 40192 +.. date: 2020-04-05-04-16-14 +.. nonce: nk8uRJ +.. section: Library + +On AIX, :func:`~time.thread_time` is now implemented with +``thread_cputime()`` which has nanosecond resolution, rather than +``clock_gettime(CLOCK_THREAD_CPUTIME_ID)`` which has a resolution of 10 ms. 
+Patch by Batuhan Taskaya. + +.. + +.. bpo: 40025 +.. date: 2020-03-21-05-26-38 +.. nonce: DTLtyq +.. section: Library + +Raise TypeError when _generate_next_value_ is defined after members. Patch +by Ethan Onstott. + +.. + +.. bpo: 39058 +.. date: 2019-12-15-19-17-10 +.. nonce: 7ci-vd +.. section: Library + +In the argparse module, the repr for Namespace() and other argument holders +now displayed in the order attributes were added. Formerly, it displayed in +alphabetical order even though argument order is preserved the user visible +parts of the module. + +.. + +.. bpo: 24416 +.. date: 2019-09-01-15-17-49 +.. nonce: G8Ww1U +.. section: Library + +The ``isocalendar()`` methods of :class:`datetime.date` and +:class:`datetime.datetime` now return a :term:`named tuple` instead of a +:class:`tuple`. + +.. + +.. bpo: 34790 +.. date: 2020-05-08-20-18-55 +.. nonce: t6kW_1 +.. section: Documentation + +Add version of removal for explicit passing of coros to `asyncio.wait()`'s +documentation + +.. + +.. bpo: 40561 +.. date: 2020-05-08-08-39-40 +.. nonce: ZMB_2i +.. section: Documentation + +Provide docstrings for webbrowser open functions. + +.. + +.. bpo: 40499 +.. date: 2020-05-04-14-20-02 +.. nonce: tjLSo8 +.. section: Documentation + +Mention that :func:`asyncio.wait` requires a non-empty set of awaitables. + +.. + +.. bpo: 39705 +.. date: 2020-03-14-18-37-06 +.. nonce: nQVqig +.. section: Documentation + +Tutorial example for sorted() in the Loop Techniques section is given a +better explanation. Also a new example is included to explain sorted()'s +basic behavior. + +.. + +.. bpo: 39435 +.. date: 2020-01-24-05-42-57 +.. nonce: EFcdFU +.. section: Documentation + +Fix an incorrect signature for :func:`pickle.loads` in the docs + +.. + +.. bpo: 40055 +.. date: 2020-05-15-01-21-44 +.. nonce: Xp4aP9 +.. section: Tests + +distutils.tests now saves/restores warnings filters to leave them unchanged. 
+Importing tests imports docutils which imports pkg_resources which adds a +warnings filter. + +.. + +.. bpo: 40436 +.. date: 2020-04-29-16-08-24 +.. nonce: gDMnYl +.. section: Tests + +test_gdb and test.pythoninfo now check gdb command exit code. + +.. + +.. bpo: 40653 +.. date: 2020-05-17-03-33-00 +.. nonce: WI8UGn +.. section: Build + +Move _dirnameW out of HAVE_SYMLINK to fix a potential compiling issue. + +.. + +.. bpo: 40514 +.. date: 2020-05-05-15-39-11 +.. nonce: bZZmuS +.. section: Build + +Add ``--with-experimental-isolated-subinterpreters`` build option to +``configure``: better isolate subinterpreters, experimental build mode. + +.. + +.. bpo: 40650 +.. date: 2020-05-17-00-08-13 +.. nonce: 4euMtU +.. section: Windows + +Include winsock2.h in pytime.c for timeval. + +.. + +.. bpo: 40458 +.. date: 2020-05-01-20-57-57 +.. nonce: Eb0ueI +.. section: Windows + +Increase reserved stack space to prevent overflow crash on Windows. + +.. + +.. bpo: 39148 +.. date: 2020-03-23-19-07-55 +.. nonce: W1YJEb +.. section: Windows + +Add IPv6 support to :mod:`asyncio` datagram endpoints in ProactorEventLoop. +Change the raised exception for unknown address families to ValueError as +it's not coming from Windows API. + +.. + +.. bpo: 34956 +.. date: 2020-05-18-02-43-11 +.. nonce: 35IcGF +.. section: macOS + +_tkinter now builds and links with non-system Tcl and Tk frameworks if they +are installed in /Library/Frameworks as had been the case on older releases +of macOS. If a macOS SDK is explicitly configured, by using ./configure +--enable-universalsdk=3D or -isysroot, only a Library/Frameworks directory in +the SDK itself is searched. The default behavior can still be overridden +with configure --with-tcltk-includes and --with-tcltk-libs. + +.. + +.. bpo: 35569 +.. date: 2020-04-15-00-02-47 +.. nonce: 02_1MV +.. section: macOS + +Expose RFC 3542 IPv6 socket options. + +.. + +.. bpo: 40479 +.. date: 2020-05-15-17-48-25 +.. nonce: B1gBl- +.. 
section: Tools/Demos + +Update multissltest helper to test with latest OpenSSL 1.0.2, 1.1.0, 1.1.1, +and 3.0.0-alpha. + +.. + +.. bpo: 40431 +.. date: 2020-04-29-01-32-17 +.. nonce: B_aEZ0 +.. section: Tools/Demos + +Fix a syntax typo in ``turtledemo`` that now raises a ``SyntaxError``. + +.. + +.. bpo: 40163 +.. date: 2020-04-03-08-32-31 +.. nonce: lX8K4B +.. section: Tools/Demos + +Fix multissltest tool. OpenSSL has changed download URL for old releases. +The multissltest tool now tries to download from current and old download +URLs. + +.. + +.. bpo: 39465 +.. date: 2020-05-14-00-36-19 +.. nonce: 3a5g-X +.. section: C API + +Remove the ``_PyUnicode_ClearStaticStrings()`` function from the C API. + +.. + +.. bpo: 38787 +.. date: 2020-05-10-16-39-08 +.. nonce: XzQ59O +.. section: C API + +Add PyCFunction_CheckExact() macro for exact type checks now that we allow +subtypes of PyCFunction, as well as PyCMethod_CheckExact() and +PyCMethod_Check() for the new PyCMethod subtype. + +.. + +.. bpo: 40545 +.. date: 2020-05-07-11-41-13 +.. nonce: 51DzF1 +.. section: C API + +Declare ``_PyErr_GetTopmostException()`` with ``PyAPI_FUNC()`` to properly +export the function in the C API. The function remains private (``_Py``) +prefix. + +.. + +.. bpo: 40412 +.. date: 2020-05-01-17-28-04 +.. nonce: dE0D8N +.. section: C API + +Nullify inittab_copy during finalization, preventing future interpreter +initializations in an embedded situation from crashing. Patch by Gregory +Szorc. + +.. + +.. bpo: 40429 +.. date: 2020-04-29-01-39-41 +.. nonce: VQfvta +.. section: C API + +The :c:func:`PyThreadState_GetFrame` function now returns a strong reference +to the frame. + +.. + +.. bpo: 40428 +.. date: 2020-04-28-23-17-27 +.. nonce: rmtpru +.. section: C API + +Remove the following functions from the C API. Call :c:func:`PyGC_Collect` +explicitly to free all free lists. 
+ +* ``PyAsyncGen_ClearFreeLists()`` +* ``PyContext_ClearFreeList()`` +* ``PyDict_ClearFreeList()`` +* ``PyFloat_ClearFreeList()`` +* ``PyFrame_ClearFreeList()`` +* ``PyList_ClearFreeList()`` +* ``PySet_ClearFreeList()`` +* ``PyTuple_ClearFreeList()`` + +.. + +.. bpo: 40421 +.. date: 2020-04-28-19-29-36 +.. nonce: 3uIIaB +.. section: C API + +New :c:func:`PyFrame_GetBack` function: get the frame next outer frame. + +.. + +.. bpo: 40421 +.. date: 2020-04-28-15-47-58 +.. nonce: ZIzOV0 +.. section: C API + +New :c:func:`PyFrame_GetCode` function: return a borrowed reference to the +frame code. + +.. + +.. bpo: 40217 +.. date: 2020-04-27-14-00-38 +.. nonce: sgn6c8 +.. section: C API + +Ensure that instances of types created with +:c:func:`PyType_FromSpecWithBases` will visit its class object when +traversing references in the garbage collector (implemented as an extension +of the provided :c:member:`~PyTypeObject.tp_traverse`). Patch by Pablo +Galindo. + +.. + +.. bpo: 38787 +.. date: 2020-01-22-12-38-59 +.. nonce: HUH6hd +.. section: C API + +Module C state is now accessible from C-defined heap type methods +(:pep:`573`). Patch by Marcel Plch and Petr Viktorin. diff --git a/Misc/NEWS.d/next/Build/2020-05-05-15-39-11.bpo-40514.bZZmuS.rst = b/Misc/NEWS.d/next/Build/2020-05-05-15-39-11.bpo-40514.bZZmuS.rst deleted file mode 100644 index ab9062c28f4bb..0000000000000 --- a/Misc/NEWS.d/next/Build/2020-05-05-15-39-11.bpo-40514.bZZmuS.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add ``--with-experimental-isolated-subinterpreters`` build option to -``configure``: better isolate subinterpreters, experimental build mode. diff --git a/Misc/NEWS.d/next/Build/2020-05-17-03-33-00.bpo-40653.WI8UGn.rst = b/Misc/NEWS.d/next/Build/2020-05-17-03-33-00.bpo-40653.WI8UGn.rst deleted file mode 100644 index 1e6c5cb32b722..0000000000000 --- a/Misc/NEWS.d/next/Build/2020-05-17-03-33-00.bpo-40653.WI8UGn.rst +++ /dev/null @@ -1 +0,0 @@ -Move _dirnameW out of HAVE_SYMLINK to fix a potential compiling issue. 
\ No newline at end of file diff --git a/Misc/NEWS.d/next/C API/2020-01-22-12-38-59.bpo-38787.HUH6hd.rst = b/Misc/NEWS.d/next/C API/2020-01-22-12-38-59.bpo-38787.HUH6hd.rst deleted file mode 100644 index 785ea323c316d..0000000000000 --- a/Misc/NEWS.d/next/C API/2020-01-22-12-38-59.bpo-38787.HUH6hd.rst=09 +++ /dev/null @@ -1,2 +0,0 @@ -Module C state is now accessible from C-defined heap type methods (:pep:`573= `). -Patch by Marcel Plch and Petr Viktorin. diff --git a/Misc/NEWS.d/next/C API/2020-04-27-14-00-38.bpo-40217.sgn6c8.rst = b/Misc/NEWS.d/next/C API/2020-04-27-14-00-38.bpo-40217.sgn6c8.rst deleted file mode 100644 index 72df4a7b56d40..0000000000000 --- a/Misc/NEWS.d/next/C API/2020-04-27-14-00-38.bpo-40217.sgn6c8.rst=09 +++ /dev/null @@ -1,5 +0,0 @@ -Ensure that instances of types created with -:c:func:`PyType_FromSpecWithBases` will visit its class object when -traversing references in the garbage collector (implemented as an extension -of the provided :c:member:`~PyTypeObject.tp_traverse`). Patch by Pablo -Galindo. diff --git a/Misc/NEWS.d/next/C API/2020-04-28-15-47-58.bpo-40421.ZIzOV0.rst = b/Misc/NEWS.d/next/C API/2020-04-28-15-47-58.bpo-40421.ZIzOV0.rst deleted file mode 100644 index 11cf87872d513..0000000000000 --- a/Misc/NEWS.d/next/C API/2020-04-28-15-47-58.bpo-40421.ZIzOV0.rst=09 +++ /dev/null @@ -1,2 +0,0 @@ -New :c:func:`PyFrame_GetCode` function: return a borrowed reference to the -frame code. diff --git a/Misc/NEWS.d/next/C API/2020-04-28-19-29-36.bpo-40421.3uIIaB.rst = b/Misc/NEWS.d/next/C API/2020-04-28-19-29-36.bpo-40421.3uIIaB.rst deleted file mode 100644 index aadfb339b1711..0000000000000 --- a/Misc/NEWS.d/next/C API/2020-04-28-19-29-36.bpo-40421.3uIIaB.rst=09 +++ /dev/null @@ -1 +0,0 @@ -New :c:func:`PyFrame_GetBack` function: get the frame next outer frame. 
diff --git a/Misc/NEWS.d/next/C API/2020-04-28-23-17-27.bpo-40428.rmtpru.rst = b/Misc/NEWS.d/next/C API/2020-04-28-23-17-27.bpo-40428.rmtpru.rst deleted file mode 100644 index f8710efb6c329..0000000000000 --- a/Misc/NEWS.d/next/C API/2020-04-28-23-17-27.bpo-40428.rmtpru.rst=09 +++ /dev/null @@ -1,11 +0,0 @@ -Remove the following functions from the C API. Call :c:func:`PyGC_Collect` -explicitly to free all free lists. - -* ``PyAsyncGen_ClearFreeLists()`` -* ``PyContext_ClearFreeList()`` -* ``PyDict_ClearFreeList()`` -* ``PyFloat_ClearFreeList()`` -* ``PyFrame_ClearFreeList()`` -* ``PyList_ClearFreeList()`` -* ``PySet_ClearFreeList()`` -* ``PyTuple_ClearFreeList()`` diff --git a/Misc/NEWS.d/next/C API/2020-04-29-01-39-41.bpo-40429.VQfvta.rst = b/Misc/NEWS.d/next/C API/2020-04-29-01-39-41.bpo-40429.VQfvta.rst deleted file mode 100644 index e02aaf9003225..0000000000000 --- a/Misc/NEWS.d/next/C API/2020-04-29-01-39-41.bpo-40429.VQfvta.rst=09 +++ /dev/null @@ -1,2 +0,0 @@ -The :c:func:`PyThreadState_GetFrame` function now returns a strong reference -to the frame. diff --git a/Misc/NEWS.d/next/C API/2020-05-01-17-28-04.bpo-40412.dE0D8N.rst = b/Misc/NEWS.d/next/C API/2020-05-01-17-28-04.bpo-40412.dE0D8N.rst deleted file mode 100644 index 92bfcddf115a6..0000000000000 --- a/Misc/NEWS.d/next/C API/2020-05-01-17-28-04.bpo-40412.dE0D8N.rst=09 +++ /dev/null @@ -1 +0,0 @@ -Nullify inittab_copy during finalization, preventing future interpreter init= ializations in an embedded situation from crashing. Patch by Gregory Szorc. diff --git a/Misc/NEWS.d/next/C API/2020-05-07-11-41-13.bpo-40545.51DzF1.rst = b/Misc/NEWS.d/next/C API/2020-05-07-11-41-13.bpo-40545.51DzF1.rst deleted file mode 100644 index d7f256a2a6b52..0000000000000 --- a/Misc/NEWS.d/next/C API/2020-05-07-11-41-13.bpo-40545.51DzF1.rst=09 +++ /dev/null @@ -1,3 +0,0 @@ -Declare ``_PyErr_GetTopmostException()`` with ``PyAPI_FUNC()`` to properly -export the function in the C API. 
The function remains private (``_Py``) -prefix. diff --git a/Misc/NEWS.d/next/C API/2020-05-10-16-39-08.bpo-38787.XzQ59O.rst = b/Misc/NEWS.d/next/C API/2020-05-10-16-39-08.bpo-38787.XzQ59O.rst deleted file mode 100644 index f80be666c1c20..0000000000000 --- a/Misc/NEWS.d/next/C API/2020-05-10-16-39-08.bpo-38787.XzQ59O.rst=09 +++ /dev/null @@ -1,2 +0,0 @@ -Add PyCFunction_CheckExact() macro for exact type checks now that we allow s= ubtypes of PyCFunction, -as well as PyCMethod_CheckExact() and PyCMethod_Check() for the new PyCMetho= d subtype. diff --git a/Misc/NEWS.d/next/C API/2020-05-14-00-36-19.bpo-39465.3a5g-X.rst = b/Misc/NEWS.d/next/C API/2020-05-14-00-36-19.bpo-39465.3a5g-X.rst deleted file mode 100644 index a08c3da566045..0000000000000 --- a/Misc/NEWS.d/next/C API/2020-05-14-00-36-19.bpo-39465.3a5g-X.rst=09 +++ /dev/null @@ -1 +0,0 @@ -Remove the ``_PyUnicode_ClearStaticStrings()`` function from the C API. diff --git a/Misc/NEWS.d/next/Core and Builtins/2019-11-20-09-50-58.bpo-37986= .o0lmA7.rst b/Misc/NEWS.d/next/Core and Builtins/2019-11-20-09-50-58.bpo-3798= 6.o0lmA7.rst deleted file mode 100644 index 62446e35ae01b..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2019-11-20-09-50-58.bpo-37986.o0lmA7= .rst=09 +++ /dev/null @@ -1,2 +0,0 @@ -Improve performance of :c:func:`PyLong_FromDouble` for values that fit into -:c:type:`long`. diff --git a/Misc/NEWS.d/next/Core and Builtins/2019-11-22-14-34-47.bpo-38880= .evcCPa.rst b/Misc/NEWS.d/next/Core and Builtins/2019-11-22-14-34-47.bpo-3888= 0.evcCPa.rst deleted file mode 100644 index 07a7f5ec22aa1..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2019-11-22-14-34-47.bpo-38880.evcCPa= .rst=09 +++ /dev/null @@ -1 +0,0 @@ -Added the ability to list interpreters associated with channel ends in the i= nternal subinterpreters module. 
diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-04-08-17-02-35.bpo-40228= .bRaaJ-.rst b/Misc/NEWS.d/next/Core and Builtins/2020-04-08-17-02-35.bpo-4022= 8.bRaaJ-.rst deleted file mode 100644 index 2a08cfd253925..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2020-04-08-17-02-35.bpo-40228.bRaaJ-= .rst=09 +++ /dev/null @@ -1 +0,0 @@ -Setting frame.f_lineno is now robust w.r.t. changes in the source-to-bytecod= e compiler diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-04-19-22-23-32.bpo-40328= .gWJ53f.rst b/Misc/NEWS.d/next/Core and Builtins/2020-04-19-22-23-32.bpo-4032= 8.gWJ53f.rst deleted file mode 100644 index ede446e0d500d..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2020-04-19-22-23-32.bpo-40328.gWJ53f= .rst=09 +++ /dev/null @@ -1 +0,0 @@ -Add tools for generating mappings headers for CJKCodecs. diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-04-30-00-50-25.bpo-29587= .oEwSq.rst b/Misc/NEWS.d/next/Core and Builtins/2020-04-30-00-50-25.bpo-29587= .oEwSq.rst deleted file mode 100644 index f44aa360cc2ef..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2020-04-30-00-50-25.bpo-29587.oEwSq.= rst=09 +++ /dev/null @@ -1 +0,0 @@ -Enable implicit exception chaining when calling :meth:`generator.throw`. diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-04-30-01-44-42.bpo-16357= 41.GKtjqr.rst b/Misc/NEWS.d/next/Core and Builtins/2020-04-30-01-44-42.bpo-16= 35741.GKtjqr.rst deleted file mode 100644 index 7b3c7511e139e..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2020-04-30-01-44-42.bpo-1635741.GKtj= qr.rst=09 +++ /dev/null @@ -1 +0,0 @@ -Port _stat module to multiphase initialization (:pep:`489`). 
diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-01-15-36-14.bpo-40408= .XzQI59.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-01-15-36-14.bpo-4040= 8.XzQI59.rst deleted file mode 100644 index e6822f9c24044..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2020-05-01-15-36-14.bpo-40408.XzQI59= .rst=09 +++ /dev/null @@ -1,2 +0,0 @@ -Fixed support of nested type variables in GenericAlias (e.g. -``list[list[T]]``). diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-01-19-04-52.bpo-40417= .Sti2lJ.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-01-19-04-52.bpo-4041= 7.Sti2lJ.rst deleted file mode 100644 index 932e853a8933d..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2020-05-01-19-04-52.bpo-40417.Sti2lJ= .rst=09 +++ /dev/null @@ -1 +0,0 @@ -Fix imp module deprecation warning when PyImport_ReloadModule is called. Pat= ch by Robert Rouhani. diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-03-23-28-11.bpo-40246= .c1D7x8.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-03-23-28-11.bpo-4024= 6.c1D7x8.rst deleted file mode 100644 index 62cd632ffd070..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2020-05-03-23-28-11.bpo-40246.c1D7x8= .rst=09 +++ /dev/null @@ -1 +0,0 @@ -Reporting a specialised error message for invalid string prefixes, which was= introduced in :issue:`40246`, is being reverted due to backwards compatibili= ty concerns for strings that immediately follow a reserved keyword without wh= itespace between them. Constructs like `bg=3D"#d00" if clear else"#fca"` were= failing to parse, which is not an acceptable breakage on such short notice. 
diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-05-03-36-27.bpo-16357= 41.ARv1YV.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-05-03-36-27.bpo-16= 35741.ARv1YV.rst deleted file mode 100644 index f484992c487bd..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2020-05-05-03-36-27.bpo-1635741.ARv1= YV.rst=09 +++ /dev/null @@ -1 +0,0 @@ -Port :mod:`syslog` to multiphase initialization (:pep:`489`). diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-05-20-36-15.bpo-40523= .hKZVTB.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-05-20-36-15.bpo-4052= 3.hKZVTB.rst deleted file mode 100644 index 14f05be59a1ed..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2020-05-05-20-36-15.bpo-40523.hKZVTB= .rst=09 +++ /dev/null @@ -1,2 +0,0 @@ -Add pass-throughs for :func:`hash` and :func:`reversed` to -:class:`weakref.proxy` objects. Patch by Pablo Galindo. diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-05-21-11-35.bpo-16357= 41.ggwD3C.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-05-21-11-35.bpo-16= 35741.ggwD3C.rst deleted file mode 100644 index 197eae97c3d1a..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2020-05-05-21-11-35.bpo-1635741.ggwD= 3C.rst=09 +++ /dev/null @@ -1 +0,0 @@ -Port :mod:`errno` to multiphase initialization (:pep:`489`). diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-06-14-52-35.bpo-40527= .gTNKuy.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-06-14-52-35.bpo-4052= 7.gTNKuy.rst deleted file mode 100644 index 19b8888230c65..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2020-05-06-14-52-35.bpo-40527.gTNKuy= .rst=09 +++ /dev/null @@ -1,2 +0,0 @@ -Fix command line argument parsing: no longer write errors multiple times -into stderr. 
diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-08-03-25-26.bpo-40502= .e-VCyL.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-08-03-25-26.bpo-4050= 2.e-VCyL.rst deleted file mode 100644 index b0ea60234634c..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2020-05-08-03-25-26.bpo-40502.e-VCyL= .rst=09 +++ /dev/null @@ -1,2 +0,0 @@ -Initialize ``n->n_col_offset``. -(Patch by Joannah Nanjekye) \ No newline at end of file diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-09-01-39-16.bpo-40566= .wlcjW_.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-09-01-39-16.bpo-4056= 6.wlcjW_.rst deleted file mode 100644 index 92a5e3ce63217..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2020-05-09-01-39-16.bpo-40566.wlcjW_= .rst=09 +++ /dev/null @@ -1 +0,0 @@ -Apply :pep:`573` to :mod:`abc`. diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-11-00-19-42.bpo-40585= .yusknY.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-11-00-19-42.bpo-4058= 5.yusknY.rst deleted file mode 100644 index 7a9258ef0a938..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2020-05-11-00-19-42.bpo-40585.yusknY= .rst=09 +++ /dev/null @@ -1,2 +0,0 @@ -Fixed a bug when using :func:`codeop.compile_command` that was causing -exceptions to be swallowed with the new parser. Patch by Pablo Galindo diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-11-13-50-52.bpo-40593= .yuOXj3.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-11-13-50-52.bpo-4059= 3.yuOXj3.rst deleted file mode 100644 index 5587d4f49ccf9..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2020-05-11-13-50-52.bpo-40593.yuOXj3= .rst=09 +++ /dev/null @@ -1 +0,0 @@ -Improved syntax errors for invalid characters in source code. 
diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-11-20-53-52.bpo-40596= .dwOH_X.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-11-20-53-52.bpo-4059= 6.dwOH_X.rst deleted file mode 100644 index 1252db4dc9848..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2020-05-11-20-53-52.bpo-40596.dwOH_X= .rst=09 +++ /dev/null @@ -1,2 +0,0 @@ -Fixed :meth:`str.isidentifier` for non-canonicalized strings containing -non-BMP characters on Windows. diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-17-20-38-12.bpo-40663= .u2aiZf.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-17-20-38-12.bpo-4066= 3.u2aiZf.rst deleted file mode 100644 index 5041abc7e3eaa..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2020-05-17-20-38-12.bpo-40663.u2aiZf= .rst=09 +++ /dev/null @@ -1,2 +0,0 @@ -Correctly generate annotations where parentheses are omitted but required -(e.g: ``Type[(str, int, *other))]``. diff --git a/Misc/NEWS.d/next/Documentation/2020-01-24-05-42-57.bpo-39435.EFc= dFU.rst b/Misc/NEWS.d/next/Documentation/2020-01-24-05-42-57.bpo-39435.EFcdFU= .rst deleted file mode 100644 index 40294c10df00a..0000000000000 --- a/Misc/NEWS.d/next/Documentation/2020-01-24-05-42-57.bpo-39435.EFcdFU.rst +++ /dev/null @@ -1 +0,0 @@ -Fix an incorrect signature for :func:`pickle.loads` in the docs \ No newline at end of file diff --git a/Misc/NEWS.d/next/Documentation/2020-03-14-18-37-06.bpo-39705.nQV= qig.rst b/Misc/NEWS.d/next/Documentation/2020-03-14-18-37-06.bpo-39705.nQVqig= .rst deleted file mode 100644 index 3454b928e70b4..0000000000000 --- a/Misc/NEWS.d/next/Documentation/2020-03-14-18-37-06.bpo-39705.nQVqig.rst +++ /dev/null @@ -1,2 +0,0 @@ -Tutorial example for sorted() in the Loop Techniques section is given a bett= er explanation. -Also a new example is included to explain sorted()'s basic behavior. 
\ No newline at end of file diff --git a/Misc/NEWS.d/next/Documentation/2020-05-04-14-20-02.bpo-40499.tjL= So8.rst b/Misc/NEWS.d/next/Documentation/2020-05-04-14-20-02.bpo-40499.tjLSo8= .rst deleted file mode 100644 index 2b7eccbf0efaf..0000000000000 --- a/Misc/NEWS.d/next/Documentation/2020-05-04-14-20-02.bpo-40499.tjLSo8.rst +++ /dev/null @@ -1 +0,0 @@ -Mention that :func:`asyncio.wait` requires a non-empty set of awaitables. diff --git a/Misc/NEWS.d/next/Documentation/2020-05-08-08-39-40.bpo-40561.ZMB= _2i.rst b/Misc/NEWS.d/next/Documentation/2020-05-08-08-39-40.bpo-40561.ZMB_2i= .rst deleted file mode 100644 index bda24719b12cb..0000000000000 --- a/Misc/NEWS.d/next/Documentation/2020-05-08-08-39-40.bpo-40561.ZMB_2i.rst +++ /dev/null @@ -1 +0,0 @@ -Provide docstrings for webbrowser open functions. diff --git a/Misc/NEWS.d/next/Documentation/2020-05-08-20-18-55.bpo-34790.t6k= W_1.rst b/Misc/NEWS.d/next/Documentation/2020-05-08-20-18-55.bpo-34790.t6kW_1= .rst deleted file mode 100644 index 4f349adff3346..0000000000000 --- a/Misc/NEWS.d/next/Documentation/2020-05-08-20-18-55.bpo-34790.t6kW_1.rst +++ /dev/null @@ -1 +0,0 @@ -Add version of removal for explicit passing of coros to `asyncio.wait()`'s d= ocumentation \ No newline at end of file diff --git a/Misc/NEWS.d/next/Library/2019-09-01-15-17-49.bpo-24416.G8Ww1U.rs= t b/Misc/NEWS.d/next/Library/2019-09-01-15-17-49.bpo-24416.G8Ww1U.rst deleted file mode 100644 index ee9af990f079d..0000000000000 --- a/Misc/NEWS.d/next/Library/2019-09-01-15-17-49.bpo-24416.G8Ww1U.rst +++ /dev/null @@ -1,3 +0,0 @@ -The ``isocalendar()`` methods of :class:`datetime.date` and -:class:`datetime.datetime` now return a :term:`named tuple` -instead of a :class:`tuple`. 
diff --git a/Misc/NEWS.d/next/Library/2019-12-15-19-17-10.bpo-39058.7ci-vd.rs= t b/Misc/NEWS.d/next/Library/2019-12-15-19-17-10.bpo-39058.7ci-vd.rst deleted file mode 100644 index fff13223bc4cd..0000000000000 --- a/Misc/NEWS.d/next/Library/2019-12-15-19-17-10.bpo-39058.7ci-vd.rst +++ /dev/null @@ -1,4 +0,0 @@ -In the argparse module, the repr for Namespace() and other argument holders -now displayed in the order attributes were added. Formerly, it displayed in -alphabetical order even though argument order is preserved the user visible -parts of the module. diff --git a/Misc/NEWS.d/next/Library/2020-03-21-05-26-38.bpo-40025.DTLtyq.rs= t b/Misc/NEWS.d/next/Library/2020-03-21-05-26-38.bpo-40025.DTLtyq.rst deleted file mode 100644 index 7b699de4e0726..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-03-21-05-26-38.bpo-40025.DTLtyq.rst +++ /dev/null @@ -1 +0,0 @@ -Raise TypeError when _generate_next_value_ is defined after members. Patch b= y Ethan Onstott. \ No newline at end of file diff --git a/Misc/NEWS.d/next/Library/2020-04-05-04-16-14.bpo-40192.nk8uRJ.rs= t b/Misc/NEWS.d/next/Library/2020-04-05-04-16-14.bpo-40192.nk8uRJ.rst deleted file mode 100644 index e1e7fcefe3f94..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-04-05-04-16-14.bpo-40192.nk8uRJ.rst +++ /dev/null @@ -1,4 +0,0 @@ -On AIX, :func:`~time.thread_time` is now implemented with ``thread_cputime()= `` -which has nanosecond resolution, rather than -``clock_gettime(CLOCK_THREAD_CPUTIME_ID)`` which has a resolution of 10 ms. -Patch by Batuhan Taskaya. diff --git a/Misc/NEWS.d/next/Library/2020-04-07-23-44-06.bpo-39075.hgck3j.rs= t b/Misc/NEWS.d/next/Library/2020-04-07-23-44-06.bpo-39075.hgck3j.rst deleted file mode 100644 index c447a191f07f3..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-04-07-23-44-06.bpo-39075.hgck3j.rst +++ /dev/null @@ -1,2 +0,0 @@ -The repr for :class:`types.SimpleNamespace` is now insertion ordered rather -than alphabetical. 
diff --git a/Misc/NEWS.d/next/Library/2020-04-14-09-54-35.bpo-40273.IN73Ks.rs= t b/Misc/NEWS.d/next/Library/2020-04-14-09-54-35.bpo-40273.IN73Ks.rst deleted file mode 100644 index 50f547f56c520..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-04-14-09-54-35.bpo-40273.IN73Ks.rst +++ /dev/null @@ -1 +0,0 @@ -:class:`types.MappingProxyType` is now reversible. diff --git a/Misc/NEWS.d/next/Library/2020-04-14-22-31-27.bpo-40291._O8hXn.rs= t b/Misc/NEWS.d/next/Library/2020-04-14-22-31-27.bpo-40291._O8hXn.rst deleted file mode 100644 index a560ef12302bb..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-04-14-22-31-27.bpo-40291._O8hXn.rst +++ /dev/null @@ -1 +0,0 @@ -Add support for CAN_J1939 sockets (available on Linux 5.4+) diff --git a/Misc/NEWS.d/next/Library/2020-04-25-20-00-58.bpo-40389.FPA6f0.rs= t b/Misc/NEWS.d/next/Library/2020-04-25-20-00-58.bpo-40389.FPA6f0.rst deleted file mode 100644 index e7a60a8d5f6f4..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-04-25-20-00-58.bpo-40389.FPA6f0.rst +++ /dev/null @@ -1 +0,0 @@ -``repr()`` now returns ``typing.Optional[T]`` when called for ``typing.Union= `` of two types, one of which is ``NoneType``. \ No newline at end of file diff --git a/Misc/NEWS.d/next/Library/2020-04-25-23-14-11.bpo-40375.5GuK2A.rs= t b/Misc/NEWS.d/next/Library/2020-04-25-23-14-11.bpo-40375.5GuK2A.rst deleted file mode 100644 index eb58e00bcf7d4..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-04-25-23-14-11.bpo-40375.5GuK2A.rst +++ /dev/null @@ -1 +0,0 @@ -:meth:`imaplib.IMAP4.unselect` is added. Patch by Dong-hee Na. 
diff --git a/Misc/NEWS.d/next/Library/2020-04-27-00-51-40.bpo-39791.wv8Dxn.rs= t b/Misc/NEWS.d/next/Library/2020-04-27-00-51-40.bpo-39791.wv8Dxn.rst deleted file mode 100644 index 237bcf7f99b0f..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-04-27-00-51-40.bpo-39791.wv8Dxn.rst +++ /dev/null @@ -1 +0,0 @@ -Added ``files()`` function to importlib.resources with support for subdirect= ories in package data, matching backport in importlib_resources 1.5. \ No newline at end of file diff --git a/Misc/NEWS.d/next/Library/2020-04-27-14-48-43.bpo-39966.N5yXUe.rs= t b/Misc/NEWS.d/next/Library/2020-04-27-14-48-43.bpo-39966.N5yXUe.rst deleted file mode 100644 index 614b452056e53..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-04-27-14-48-43.bpo-39966.N5yXUe.rst +++ /dev/null @@ -1,2 +0,0 @@ -Revert bpo-25597. :class:`unittest.mock.MagicMock` with wraps' set uses -default return values for magic methods. diff --git a/Misc/NEWS.d/next/Library/2020-04-27-17-19-09.bpo-30966._5lDx-.rs= t b/Misc/NEWS.d/next/Library/2020-04-27-17-19-09.bpo-30966._5lDx-.rst deleted file mode 100644 index 14e9e11538763..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-04-27-17-19-09.bpo-30966._5lDx-.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add a new :meth:`~multiprocessing.SimpleQueue.close` method to the -:class:`~multiprocessing.SimpleQueue` class to explicitly close the queue. diff --git a/Misc/NEWS.d/next/Library/2020-04-27-20-27-39.bpo-30966.Xmtlqu.rs= t b/Misc/NEWS.d/next/Library/2020-04-27-20-27-39.bpo-30966.Xmtlqu.rst deleted file mode 100644 index 85b7934ba661e..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-04-27-20-27-39.bpo-30966.Xmtlqu.rst +++ /dev/null @@ -1,2 +0,0 @@ -``Process.shutdown(wait=3DTrue)`` of :mod:`concurrent.futures` now closes -explicitly the result queue. 
diff --git a/Misc/NEWS.d/next/Library/2020-04-28-18-25-27.bpo-39995.WmA3Gk.rs= t b/Misc/NEWS.d/next/Library/2020-04-28-18-25-27.bpo-39995.WmA3Gk.rst deleted file mode 100644 index 24aff65736337..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-04-28-18-25-27.bpo-39995.WmA3Gk.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix a race condition in concurrent.futures._ThreadWakeup: access to -_ThreadWakeup is now protected with the shutdown lock. diff --git a/Misc/NEWS.d/next/Library/2020-04-28-18-59-48.bpo-40394.Yi5uuM.rs= t b/Misc/NEWS.d/next/Library/2020-04-28-18-59-48.bpo-40394.Yi5uuM.rst deleted file mode 100644 index ef2e239b1e678..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-04-28-18-59-48.bpo-40394.Yi5uuM.rst +++ /dev/null @@ -1 +0,0 @@ -Added default arguments to :meth:`difflib.SequenceMatcher.find_longest_match= ()`. \ No newline at end of file diff --git a/Misc/NEWS.d/next/Library/2020-04-29-18-02-16.bpo-40286.txbQNx.rs= t b/Misc/NEWS.d/next/Library/2020-04-29-18-02-16.bpo-40286.txbQNx.rst deleted file mode 100644 index ab9bfa65e07c2..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-04-29-18-02-16.bpo-40286.txbQNx.rst +++ /dev/null @@ -1,3 +0,0 @@ -Remove ``_random.Random.randbytes()``: the C implementation of -``randbytes()``. Implement the method in Python to ease subclassing: -``randbytes()`` now directly reuses ``getrandbits()``. diff --git a/Misc/NEWS.d/next/Library/2020-04-30-22-04-58.bpo-40453.ggz7sl.rs= t b/Misc/NEWS.d/next/Library/2020-04-30-22-04-58.bpo-40453.ggz7sl.rst deleted file mode 100644 index f20c666d3e27f..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-04-30-22-04-58.bpo-40453.ggz7sl.rst +++ /dev/null @@ -1,3 +0,0 @@ -Add ``isolated=3DTrue`` keyword-only parameter to -``_xxsubinterpreters.create()``. An isolated subinterpreter cannot spawn -threads, spawn a child process or call ``os.fork()``. 
diff --git a/Misc/NEWS.d/next/Library/2020-04-30-22-25-08.bpo-32494.1xaU5l.rs= t b/Misc/NEWS.d/next/Library/2020-04-30-22-25-08.bpo-32494.1xaU5l.rst deleted file mode 100644 index 3989700c5cd83..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-04-30-22-25-08.bpo-32494.1xaU5l.rst +++ /dev/null @@ -1,2 +0,0 @@ -Update :mod:`dbm.gnu` to use gdbm_count if possible when calling -:func:`len`. Patch by Dong-hee Na. diff --git a/Misc/NEWS.d/next/Library/2020-05-01-00-22-58.bpo-39305.Cuwu_H.rs= t b/Misc/NEWS.d/next/Library/2020-05-01-00-22-58.bpo-39305.Cuwu_H.rst deleted file mode 100644 index 7c6fdb3ede1c6..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-01-00-22-58.bpo-39305.Cuwu_H.rst +++ /dev/null @@ -1,2 +0,0 @@ -Update :mod:`nntplib` to merge :class:`nntplib.NNTP` and -:class:`nntplib._NNTPBase`. Patch by Dong-hee Na. diff --git a/Misc/NEWS.d/next/Library/2020-05-01-23-24-25.bpo-39435.mgb6ib.rs= t b/Misc/NEWS.d/next/Library/2020-05-01-23-24-25.bpo-39435.mgb6ib.rst deleted file mode 100644 index 2a516a53ed9e2..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-01-23-24-25.bpo-39435.mgb6ib.rst +++ /dev/null @@ -1 +0,0 @@ -The first argument of :func:`pickle.loads` is now positional-only. diff --git a/Misc/NEWS.d/next/Library/2020-05-02-04-29-31.bpo-40459.fSAYVD.rs= t b/Misc/NEWS.d/next/Library/2020-05-02-04-29-31.bpo-40459.fSAYVD.rst deleted file mode 100644 index d4bf6987fa260..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-02-04-29-31.bpo-40459.fSAYVD.rst +++ /dev/null @@ -1 +0,0 @@ -:func:`platform.win32_ver` now produces correct *ptype* strings instead of e= mpty strings. diff --git a/Misc/NEWS.d/next/Library/2020-05-02-12-00-28.bpo-40465.qfCjOD.rs= t b/Misc/NEWS.d/next/Library/2020-05-02-12-00-28.bpo-40465.qfCjOD.rst deleted file mode 100644 index 7ce9a44c712e7..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-02-12-00-28.bpo-40465.qfCjOD.rst +++ /dev/null @@ -1 +0,0 @@ -Deprecated the optional *random* argument to *random.shuffle()*. 
diff --git a/Misc/NEWS.d/next/Library/2020-05-02-14-24-48.bpo-40355.xTujaB.rs= t b/Misc/NEWS.d/next/Library/2020-05-02-14-24-48.bpo-40355.xTujaB.rst deleted file mode 100644 index 81f9e937a2bff..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-02-14-24-48.bpo-40355.xTujaB.rst +++ /dev/null @@ -1,2 +0,0 @@ -Improve error reporting in :func:`ast.literal_eval` in the presence of malfo= rmed :class:`ast.Dict` -nodes instead of silently ignoring any non-conforming elements. Patch by Cur= tis Bucher. diff --git a/Misc/NEWS.d/next/Library/2020-05-02-17-17-37.bpo-40457.EXReI1.rs= t b/Misc/NEWS.d/next/Library/2020-05-02-17-17-37.bpo-40457.EXReI1.rst deleted file mode 100644 index 19b6dd685cd8c..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-02-17-17-37.bpo-40457.EXReI1.rst +++ /dev/null @@ -1 +0,0 @@ -The ssl module now support OpenSSL builds without TLS 1.0 and 1.1 methods. diff --git a/Misc/NEWS.d/next/Library/2020-05-04-11-20-49.bpo-40495.TyTc2O.rs= t b/Misc/NEWS.d/next/Library/2020-05-04-11-20-49.bpo-40495.TyTc2O.rst deleted file mode 100644 index d3049b05a78b6..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-04-11-20-49.bpo-40495.TyTc2O.rst +++ /dev/null @@ -1,2 +0,0 @@ -:mod:`compileall` is now able to use hardlinks to prevent duplicates in a -case when ``.pyc`` files for different optimization levels have the same con= tent. diff --git a/Misc/NEWS.d/next/Library/2020-05-04-21-21-43.bpo-40480.mjldWa.rs= t b/Misc/NEWS.d/next/Library/2020-05-04-21-21-43.bpo-40480.mjldWa.rst deleted file mode 100644 index d046b1422419d..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-04-21-21-43.bpo-40480.mjldWa.rst +++ /dev/null @@ -1 +0,0 @@ -``fnmatch.fnmatch()`` could take exponential time in the presence of multipl= e ``*`` pattern characters. This was repaired by generating more elaborate r= egular expressions to avoid futile backtracking. 
\ No newline at end of file diff --git a/Misc/NEWS.d/next/Library/2020-05-05-08-12-51.bpo-40559.112wwa.rs= t b/Misc/NEWS.d/next/Library/2020-05-05-08-12-51.bpo-40559.112wwa.rst deleted file mode 100644 index 15846351f25bb..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-05-08-12-51.bpo-40559.112wwa.rst +++ /dev/null @@ -1 +0,0 @@ -Fix possible memory leak in the C implementation of :class:`asyncio.Task`. \ No newline at end of file diff --git a/Misc/NEWS.d/next/Library/2020-05-05-17-12-47.bpo-40504.EX6wPn.rs= t b/Misc/NEWS.d/next/Library/2020-05-05-17-12-47.bpo-40504.EX6wPn.rst deleted file mode 100644 index 261a49e432928..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-05-17-12-47.bpo-40504.EX6wPn.rst +++ /dev/null @@ -1 +0,0 @@ -:func:`functools.lru_cache` objects can now be the targets of weakrefs. \ No newline at end of file diff --git a/Misc/NEWS.d/next/Library/2020-05-06-02-33-00.bpo-31033.aX12pw.rs= t b/Misc/NEWS.d/next/Library/2020-05-06-02-33-00.bpo-31033.aX12pw.rst deleted file mode 100644 index b1831e5ff8f89..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-06-02-33-00.bpo-31033.aX12pw.rst +++ /dev/null @@ -1,2 +0,0 @@ -When a :class:`asyncio.Task` is cancelled, the exception traceback -now chains all the way back to where the task was first interrupted. diff --git a/Misc/NEWS.d/next/Library/2020-05-06-13-51-19.bpo-40515.TUCvYB.rs= t b/Misc/NEWS.d/next/Library/2020-05-06-13-51-19.bpo-40515.TUCvYB.rst deleted file mode 100644 index af77a57fe7237..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-06-13-51-19.bpo-40515.TUCvYB.rst +++ /dev/null @@ -1,3 +0,0 @@ -The :mod:`ssl` and :mod:`hashlib` modules now actively check that OpenSSL is -build with thread support. Python 3.7.0 made thread support mandatory and no -longer works safely with a no-thread builds. 
diff --git a/Misc/NEWS.d/next/Library/2020-05-06-15-36-47.bpo-40541.LlYghL.rs= t b/Misc/NEWS.d/next/Library/2020-05-06-15-36-47.bpo-40541.LlYghL.rst deleted file mode 100644 index a2e694ac1ad08..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-06-15-36-47.bpo-40541.LlYghL.rst +++ /dev/null @@ -1 +0,0 @@ -Added an optional *counts* parameter to random.sample(). diff --git a/Misc/NEWS.d/next/Library/2020-05-07-06-41-20.bpo-31033.waCj3n.rs= t b/Misc/NEWS.d/next/Library/2020-05-07-06-41-20.bpo-31033.waCj3n.rst deleted file mode 100644 index e3d35a04aab51..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-07-06-41-20.bpo-31033.waCj3n.rst +++ /dev/null @@ -1 +0,0 @@ -Add a ``msg`` argument to :meth:`Future.cancel` and :meth:`Task.cancel`. diff --git a/Misc/NEWS.d/next/Library/2020-05-07-20-11-51.bpo-40549.6FiRSV.rs= t b/Misc/NEWS.d/next/Library/2020-05-07-20-11-51.bpo-40549.6FiRSV.rst deleted file mode 100644 index 873ff49c1eb00..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-07-20-11-51.bpo-40549.6FiRSV.rst +++ /dev/null @@ -1,2 +0,0 @@ -Convert posixmodule.c ("posix" or "nt" module) to the multiphase -initialization (PEP 489). diff --git a/Misc/NEWS.d/next/Library/2020-05-07-21-22-04.bpo-40397.PVWFAn.rs= t b/Misc/NEWS.d/next/Library/2020-05-07-21-22-04.bpo-40397.PVWFAn.rst deleted file mode 100644 index 46e806a2dc222..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-07-21-22-04.bpo-40397.PVWFAn.rst +++ /dev/null @@ -1,2 +0,0 @@ -Removed attributes ``__args__`` and ``__parameters__`` from special generic -aliases like ``typing.List`` (not subscripted). diff --git a/Misc/NEWS.d/next/Library/2020-05-08-15-48-39.bpo-40503.elZyxc.rs= t b/Misc/NEWS.d/next/Library/2020-05-08-15-48-39.bpo-40503.elZyxc.rst deleted file mode 100644 index d68797a738c2c..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-08-15-48-39.bpo-40503.elZyxc.rst +++ /dev/null @@ -1 +0,0 @@ -:pep:`615`, the :mod:`zoneinfo` module. Adds support for the IANA time zone = database. 
diff --git a/Misc/NEWS.d/next/Library/2020-05-09-15-38-25.bpo-40571.kOXZGC.rs= t b/Misc/NEWS.d/next/Library/2020-05-09-15-38-25.bpo-40571.kOXZGC.rst deleted file mode 100644 index 476770f6974d2..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-09-15-38-25.bpo-40571.kOXZGC.rst +++ /dev/null @@ -1,2 +0,0 @@ -Added functools.cache() as a simpler, more discoverable way to access the -unbounded cache variant of lru_cache(maxsize=3DNone). diff --git a/Misc/NEWS.d/next/Library/2020-05-11-19-17-23.bpo-40597.4SGfgm.rs= t b/Misc/NEWS.d/next/Library/2020-05-11-19-17-23.bpo-40597.4SGfgm.rst deleted file mode 100644 index 1b9fe609c25b7..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-11-19-17-23.bpo-40597.4SGfgm.rst +++ /dev/null @@ -1 +0,0 @@ -If text content lines are longer than policy.max_line_length, always use a c= ontent-encoding to make sure they are wrapped. diff --git a/Misc/NEWS.d/next/Library/2020-05-13-10-23-29.bpo-40612.gOIreM.rs= t b/Misc/NEWS.d/next/Library/2020-05-13-10-23-29.bpo-40612.gOIreM.rst deleted file mode 100644 index 32cc8073d3f79..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-13-10-23-29.bpo-40612.gOIreM.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix edge cases in SyntaxError formatting. If the offset is <=3D 0, no caret = is printed. -If the offset is > line length, the caret is printed pointing just after the= last character. diff --git a/Misc/NEWS.d/next/Library/2020-05-13-15-32-13.bpo-40607.uSPFCi.rs= t b/Misc/NEWS.d/next/Library/2020-05-13-15-32-13.bpo-40607.uSPFCi.rst deleted file mode 100644 index 8060628b59548..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-13-15-32-13.bpo-40607.uSPFCi.rst +++ /dev/null @@ -1,3 +0,0 @@ -When cancelling a task due to timeout, :meth:`asyncio.wait_for` will now -propagate the exception if an error happens during cancellation. -Patch by Roman Skurikhin. 
diff --git a/Misc/NEWS.d/next/Library/2020-05-13-23-10-25.bpo-40257.aR4TGp.rs= t b/Misc/NEWS.d/next/Library/2020-05-13-23-10-25.bpo-40257.aR4TGp.rst deleted file mode 100644 index 9d4037bc9aa79..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-13-23-10-25.bpo-40257.aR4TGp.rst +++ /dev/null @@ -1 +0,0 @@ -Revert changes to :func:`inspect.getdoc`. diff --git a/Misc/NEWS.d/next/Library/2020-05-15-17-38-21.bpo-40479.yamSCh.rs= t b/Misc/NEWS.d/next/Library/2020-05-15-17-38-21.bpo-40479.yamSCh.rst deleted file mode 100644 index 87ede982f2967..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-15-17-38-21.bpo-40479.yamSCh.rst +++ /dev/null @@ -1 +0,0 @@ -The :mod:`hashlib` now compiles with OpenSSL 3.0.0-alpha2. diff --git a/Misc/NEWS.d/next/Library/2020-05-15-19-53-18.bpo-37630.O5kgAw.rs= t b/Misc/NEWS.d/next/Library/2020-05-15-19-53-18.bpo-37630.O5kgAw.rst deleted file mode 100644 index 78458e6d1a46b..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-15-19-53-18.bpo-37630.O5kgAw.rst +++ /dev/null @@ -1,2 +0,0 @@ -The :mod:`hashlib` module can now use SHA3 hashes and SHAKE XOF from OpenSSL -when available. diff --git a/Misc/NEWS.d/next/Library/2020-05-15-21-57-10.bpo-40637.lb3Bnp.rs= t b/Misc/NEWS.d/next/Library/2020-05-15-21-57-10.bpo-40637.lb3Bnp.rst deleted file mode 100644 index d05e57d86b6ec..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-15-21-57-10.bpo-40637.lb3Bnp.rst +++ /dev/null @@ -1,2 +0,0 @@ -Builtin hash modules can now be disabled or selectively enabled with -``configure --with-builtin-hashlib-hashes=3Dsha3,blake1`` or ``--without-bui= ltin-hashlib-hashes``. diff --git a/Misc/NEWS.d/next/Library/2020-05-16-17-05-02.bpo-40645.wYSkjT.rs= t b/Misc/NEWS.d/next/Library/2020-05-16-17-05-02.bpo-40645.wYSkjT.rst deleted file mode 100644 index bb7eacdc5aa78..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-16-17-05-02.bpo-40645.wYSkjT.rst +++ /dev/null @@ -1 +0,0 @@ -The internal module ``_hashlib`` wraps and exposes OpenSSL's HMAC API. 
The n= ew code will be used in Python 3.10 after the internal implementation details= of the pure Python HMAC module are no longer part of the public API. diff --git a/Misc/NEWS.d/next/Library/2020-05-16-19-34-38.bpo-40645.7ibMt-.rs= t b/Misc/NEWS.d/next/Library/2020-05-16-19-34-38.bpo-40645.7ibMt-.rst deleted file mode 100644 index 19d5a651eb49a..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-16-19-34-38.bpo-40645.7ibMt-.rst +++ /dev/null @@ -1,3 +0,0 @@ -The :class:`hmac.HMAC` exposes internal implementation details. The -attributes ``digest_cons``, ``inner``, and ``outer`` are deprecated and will -be removed in the future. diff --git a/Misc/NEWS.d/next/Library/2020-05-17-14-00-12.bpo-40536.FCpoRA.rs= t b/Misc/NEWS.d/next/Library/2020-05-17-14-00-12.bpo-40536.FCpoRA.rst deleted file mode 100644 index ba7773bf61fa6..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-17-14-00-12.bpo-40536.FCpoRA.rst +++ /dev/null @@ -1,2 +0,0 @@ -Added the :func:`~zoneinfo.available_timezones` function to the -:mod:`zoneinfo` module. Patch by Paul Ganssle. diff --git a/Misc/NEWS.d/next/Library/2020-05-17-21-56-38.bpo-40665.msB7u5.rs= t b/Misc/NEWS.d/next/Library/2020-05-17-21-56-38.bpo-40665.msB7u5.rst deleted file mode 100644 index 160b2ca75d31c..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-17-21-56-38.bpo-40665.msB7u5.rst +++ /dev/null @@ -1 +0,0 @@ -Convert :mod:`bisect` to use Argument Clinic. \ No newline at end of file diff --git a/Misc/NEWS.d/next/Library/2020-05-18-12-56-45.bpo-40662.dfornR.rs= t b/Misc/NEWS.d/next/Library/2020-05-18-12-56-45.bpo-40662.dfornR.rst deleted file mode 100644 index a960c3f61b6bb..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-18-12-56-45.bpo-40662.dfornR.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed :func:`ast.get_source_segment` for ast nodes that have incomplete loca= tion information. Patch by Irit Katriel. 
diff --git a/Misc/NEWS.d/next/Security/2020-05-06-00-41-11.bpo-40501._61wv_.r= st b/Misc/NEWS.d/next/Security/2020-05-06-00-41-11.bpo-40501._61wv_.rst deleted file mode 100644 index 5ce22eb8a92ee..0000000000000 --- a/Misc/NEWS.d/next/Security/2020-05-06-00-41-11.bpo-40501._61wv_.rst +++ /dev/null @@ -1,2 +0,0 @@ -:mod:`uuid` no longer uses :mod:`ctypes` to load :file:`libuuid` or -:file:`rpcrt4.dll` at runtime. diff --git a/Misc/NEWS.d/next/Tests/2020-04-29-16-08-24.bpo-40436.gDMnYl.rst = b/Misc/NEWS.d/next/Tests/2020-04-29-16-08-24.bpo-40436.gDMnYl.rst deleted file mode 100644 index 0aee2c3aa2b4d..0000000000000 --- a/Misc/NEWS.d/next/Tests/2020-04-29-16-08-24.bpo-40436.gDMnYl.rst +++ /dev/null @@ -1 +0,0 @@ -test_gdb and test.pythoninfo now check gdb command exit code. diff --git a/Misc/NEWS.d/next/Tests/2020-05-15-01-21-44.bpo-40055.Xp4aP9.rst = b/Misc/NEWS.d/next/Tests/2020-05-15-01-21-44.bpo-40055.Xp4aP9.rst deleted file mode 100644 index edb01182c3a5c..0000000000000 --- a/Misc/NEWS.d/next/Tests/2020-05-15-01-21-44.bpo-40055.Xp4aP9.rst +++ /dev/null @@ -1,3 +0,0 @@ -distutils.tests now saves/restores warnings filters to leave them unchanged. -Importing tests imports docutils which imports pkg_resources which adds a -warnings filter. diff --git a/Misc/NEWS.d/next/Tools-Demos/2020-04-03-08-32-31.bpo-40163.lX8K4= B.rst b/Misc/NEWS.d/next/Tools-Demos/2020-04-03-08-32-31.bpo-40163.lX8K4B.rst deleted file mode 100644 index fc0a22a0a953e..0000000000000 --- a/Misc/NEWS.d/next/Tools-Demos/2020-04-03-08-32-31.bpo-40163.lX8K4B.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix multissltest tool. OpenSSL has changed download URL for old releases. -The multissltest tool now tries to download from current and old download -URLs. 
diff --git a/Misc/NEWS.d/next/Tools-Demos/2020-04-29-01-32-17.bpo-40431.B_aEZ= 0.rst b/Misc/NEWS.d/next/Tools-Demos/2020-04-29-01-32-17.bpo-40431.B_aEZ0.rst deleted file mode 100644 index abef046326fcb..0000000000000 --- a/Misc/NEWS.d/next/Tools-Demos/2020-04-29-01-32-17.bpo-40431.B_aEZ0.rst +++ /dev/null @@ -1 +0,0 @@ -Fix a syntax typo in ``turtledemo`` that now raises a ``SyntaxError``. diff --git a/Misc/NEWS.d/next/Tools-Demos/2020-05-15-17-48-25.bpo-40479.B1gBl= -.rst b/Misc/NEWS.d/next/Tools-Demos/2020-05-15-17-48-25.bpo-40479.B1gBl-.rst deleted file mode 100644 index b59035971d7b0..0000000000000 --- a/Misc/NEWS.d/next/Tools-Demos/2020-05-15-17-48-25.bpo-40479.B1gBl-.rst +++ /dev/null @@ -1,2 +0,0 @@ -Update multissltest helper to test with latest OpenSSL 1.0.2, 1.1.0, 1.1.1, -and 3.0.0-alpha. diff --git a/Misc/NEWS.d/next/Windows/2020-03-23-19-07-55.bpo-39148.W1YJEb.rs= t b/Misc/NEWS.d/next/Windows/2020-03-23-19-07-55.bpo-39148.W1YJEb.rst deleted file mode 100644 index 7c70dce1e7333..0000000000000 --- a/Misc/NEWS.d/next/Windows/2020-03-23-19-07-55.bpo-39148.W1YJEb.rst +++ /dev/null @@ -1,3 +0,0 @@ -Add IPv6 support to :mod:`asyncio` datagram endpoints in ProactorEventLoop. -Change the raised exception for unknown address families to ValueError -as it's not coming from Windows API. diff --git a/Misc/NEWS.d/next/Windows/2020-05-01-20-57-57.bpo-40458.Eb0ueI.rs= t b/Misc/NEWS.d/next/Windows/2020-05-01-20-57-57.bpo-40458.Eb0ueI.rst deleted file mode 100644 index 4dc1ff480df87..0000000000000 --- a/Misc/NEWS.d/next/Windows/2020-05-01-20-57-57.bpo-40458.Eb0ueI.rst +++ /dev/null @@ -1 +0,0 @@ -Increase reserved stack space to prevent overflow crash on Windows. 
diff --git a/Misc/NEWS.d/next/Windows/2020-05-17-00-08-13.bpo-40650.4euMtU.rs= t b/Misc/NEWS.d/next/Windows/2020-05-17-00-08-13.bpo-40650.4euMtU.rst deleted file mode 100644 index db13e58b14a79..0000000000000 --- a/Misc/NEWS.d/next/Windows/2020-05-17-00-08-13.bpo-40650.4euMtU.rst +++ /dev/null @@ -1 +0,0 @@ -Include winsock2.h in pytime.c for timeval. \ No newline at end of file diff --git a/Misc/NEWS.d/next/macOS/2020-04-15-00-02-47.bpo-35569.02_1MV.rst = b/Misc/NEWS.d/next/macOS/2020-04-15-00-02-47.bpo-35569.02_1MV.rst deleted file mode 100644 index ed48efd7f5c49..0000000000000 --- a/Misc/NEWS.d/next/macOS/2020-04-15-00-02-47.bpo-35569.02_1MV.rst +++ /dev/null @@ -1 +0,0 @@ -Expose RFC 3542 IPv6 socket options. diff --git a/Misc/NEWS.d/next/macOS/2020-05-18-02-43-11.bpo-34956.35IcGF.rst = b/Misc/NEWS.d/next/macOS/2020-05-18-02-43-11.bpo-34956.35IcGF.rst deleted file mode 100644 index 6ad9c1ac93355..0000000000000 --- a/Misc/NEWS.d/next/macOS/2020-05-18-02-43-11.bpo-34956.35IcGF.rst +++ /dev/null @@ -1,6 +0,0 @@ -_tkinter now builds and links with non-system Tcl and Tk frameworks if they -are installed in /Library/Frameworks as had been the case on older releases -of macOS. If a macOS SDK is explicitly configured, by using ./configure ---enable-universalsdk=3D or -isysroot, only a Library/Frameworks directory in -the SDK itself is searched. The default behavior can still be overridden with -configure --with-tcltk-includes and --with-tcltk-libs. 
From webhook-mailer at python.org Thu May 21 00:23:04 2020 From: webhook-mailer at python.org (=?utf-8?q?R=C3=A9mi?= Lapeyre) Date: Thu, 21 May 2020 04:23:04 -0000 Subject: [Python-checkins] (no subject) Message-ID: To: python-checkins at python.org Subject: Use f-strings in argparse HOWTO (GH-20070) Content-Type: text/plain; charset="utf-8" Content-Transfer-Encoding: quoted-printable MIME-Version: 1.0 https://github.com/python/cpython/commit/7efb826c3e54edf10315e4baf5e96fe9a372= 9da4 commit: 7efb826c3e54edf10315e4baf5e96fe9a3729da4 branch: master author: R=C3=A9mi Lapeyre committer: GitHub date: 2020-05-20T21:22:59-07:00 summary: Use f-strings in argparse HOWTO (GH-20070) files: M Doc/howto/argparse.rst diff --git a/Doc/howto/argparse.rst b/Doc/howto/argparse.rst index e78a022b372fa..76d8e6be42935 100644 --- a/Doc/howto/argparse.rst +++ b/Doc/howto/argparse.rst @@ -353,7 +353,7 @@ Our program keeps growing in complexity:: args =3D parser.parse_args() answer =3D args.square**2 if args.verbose: - print("the square of {} equals {}".format(args.square, answer)) + print(f"the square of {args.square} equals {answer}") else: print(answer) =20 @@ -387,9 +387,9 @@ multiple verbosity values, and actually get to use them:: args =3D parser.parse_args() answer =3D args.square**2 if args.verbosity =3D=3D 2: - print("the square of {} equals {}".format(args.square, answer)) + print(f"the square of {args.square} equals {answer}") elif args.verbosity =3D=3D 1: - print("{}^2 =3D=3D {}".format(args.square, answer)) + print(f"{args.square}^2 =3D=3D {answer}") else: print(answer) =20 @@ -421,9 +421,9 @@ Let's fix it by restricting the values the ``--verbosity`= ` option can accept:: args =3D parser.parse_args() answer =3D args.square**2 if args.verbosity =3D=3D 2: - print("the square of {} equals {}".format(args.square, answer)) + print(f"the square of {args.square} equals {answer}") elif args.verbosity =3D=3D 1: - print("{}^2 =3D=3D {}".format(args.square, answer)) + 
print(f"{args.square}^2 =3D=3D {answer}") else: print(answer) =20 @@ -461,9 +461,9 @@ verbosity argument (check the output of ``python --help``= ):: args =3D parser.parse_args() answer =3D args.square**2 if args.verbosity =3D=3D 2: - print("the square of {} equals {}".format(args.square, answer)) + print(f"the square of {args.square} equals {answer}") elif args.verbosity =3D=3D 1: - print("{}^2 =3D=3D {}".format(args.square, answer)) + print(f"{args.square}^2 =3D=3D {answer}") else: print(answer) =20 @@ -529,9 +529,9 @@ Let's fix:: =20 # bugfix: replace =3D=3D with >=3D if args.verbosity >=3D 2: - print("the square of {} equals {}".format(args.square, answer)) + print(f"the square of {args.square} equals {answer}") elif args.verbosity >=3D 1: - print("{}^2 =3D=3D {}".format(args.square, answer)) + print(f"{args.square}^2 =3D=3D {answer}") else: print(answer) =20 @@ -566,9 +566,9 @@ Let's fix that bug:: args =3D parser.parse_args() answer =3D args.square**2 if args.verbosity >=3D 2: - print("the square of {} equals {}".format(args.square, answer)) + print(f"the square of {args.square} equals {answer}") elif args.verbosity >=3D 1: - print("{}^2 =3D=3D {}".format(args.square, answer)) + print(f"{args.square}^2 =3D=3D {answer}") else: print(answer) =20 @@ -606,9 +606,9 @@ not just squares:: args =3D parser.parse_args() answer =3D args.x**args.y if args.verbosity >=3D 2: - print("{} to the power {} equals {}".format(args.x, args.y, answer)) + print(f"{args.x} to the power {args.y} equals {answer}") elif args.verbosity >=3D 1: - print("{}^{} =3D=3D {}".format(args.x, args.y, answer)) + print(f"{args.x}^{args.y} =3D=3D {answer}") else: print(answer) =20 @@ -645,9 +645,9 @@ to display *more* text instead:: args =3D parser.parse_args() answer =3D args.x**args.y if args.verbosity >=3D 2: - print("Running '{}'".format(__file__)) + print(f"Running '{__file__}'") if args.verbosity >=3D 1: - print("{}^{} =3D=3D ".format(args.x, args.y), end=3D"") + print(f"{args.x}^{args.y} 
=3D=3D ", end=3D"") print(answer) =20 Output: @@ -688,9 +688,9 @@ which will be the opposite of the ``--verbose`` one:: if args.quiet: print(answer) elif args.verbose: - print("{} to the power {} equals {}".format(args.x, args.y, answer)) + print(f"{args.x} to the power {args.y} equals {answer}") else: - print("{}^{} =3D=3D {}".format(args.x, args.y, answer)) + print(f"{args.x}^{args.y} =3D=3D {answer}") =20 Our program is now simpler, and we've lost some functionality for the sake of demonstration. Anyways, here's the output: From webhook-mailer at python.org Fri May 22 20:51:03 2020 From: webhook-mailer at python.org (Ville =?utf-8?q?Skytt=C3=A4?=) Date: Sat, 23 May 2020 00:51:03 -0000 Subject: [Python-checkins] (no subject) Message-ID: To: python-checkins at python.org Subject: Cosmetic smtplib changes (GH-8718) Content-Type: text/plain; charset="utf-8" Content-Transfer-Encoding: quoted-printable MIME-Version: 1.0 https://github.com/python/cpython/commit/da51ba442c7bf717872633676207c1ae10e9= 9c99 commit: da51ba442c7bf717872633676207c1ae10e99c99 branch: master author: Ville Skytt=C3=A4 committer: GitHub date: 2020-05-22T17:50:58-07:00 summary: Cosmetic smtplib changes (GH-8718) Some cosmetic smtplib changes here. Let me know if you'd like a bpo/news entr= y or splitting this PR into two. Automerge-Triggered-By: @maxking files: M Doc/library/smtplib.rst M Lib/smtplib.py diff --git a/Doc/library/smtplib.rst b/Doc/library/smtplib.rst index a88e358eae5fd..b3cc60357f554 100644 --- a/Doc/library/smtplib.rst +++ b/Doc/library/smtplib.rst @@ -279,9 +279,10 @@ An :class:`SMTP` instance has the following methods: response for ESMTP option and store them for use by :meth:`has_extn`. 
Also sets several informational attributes: the message returned by the server is stored as the :attr:`ehlo_resp` attribute, :attr:`does_esmt= p` - is set to true or false depending on whether the server supports ESMTP, a= nd - :attr:`esmtp_features` will be a dictionary containing the names of the - SMTP service extensions this server supports, and their parameters (if an= y). + is set to ``True`` or ``False`` depending on whether the server supports + ESMTP, and :attr:`esmtp_features` will be a dictionary containing the nam= es + of the SMTP service extensions this server supports, and their parameters + (if any). =20 Unless you wish to use :meth:`has_extn` before sending mail, it should no= t be necessary to call this method explicitly. It will be implicitly called by diff --git a/Lib/smtplib.py b/Lib/smtplib.py index 7808ba01cba88..e2dbbbcf2e6d1 100755 --- a/Lib/smtplib.py +++ b/Lib/smtplib.py @@ -222,7 +222,7 @@ class SMTP: helo_resp =3D None ehlo_msg =3D "ehlo" ehlo_resp =3D None - does_esmtp =3D 0 + does_esmtp =3D False default_port =3D SMTP_PORT =20 def __init__(self, host=3D'', port=3D0, local_hostname=3DNone, @@ -452,7 +452,7 @@ def ehlo(self, name=3D''): self.ehlo_resp =3D msg if code !=3D 250: return (code, msg) - self.does_esmtp =3D 1 + self.does_esmtp =3D True #parse the ehlo response -ddm assert isinstance(self.ehlo_resp, bytes), repr(self.ehlo_resp) resp =3D self.ehlo_resp.decode("latin-1").split('\n') @@ -781,7 +781,7 @@ def starttls(self, keyfile=3DNone, certfile=3DNone, conte= xt=3DNone): self.helo_resp =3D None self.ehlo_resp =3D None self.esmtp_features =3D {} - self.does_esmtp =3D 0 + self.does_esmtp =3D False else: # RFC 3207: # 501 Syntax error (no parameters allowed) From webhook-mailer at python.org Sun May 24 17:13:01 2020 From: webhook-mailer at python.org (=?utf-8?q?R=C3=A9mi?= Lapeyre) Date: Sun, 24 May 2020 21:13:01 -0000 Subject: [Python-checkins] (no subject) Message-ID: To: python-checkins at python.org Subject: bpo-36290: Fix 
keytword collision handling in AST node constructors (GH-12382) Content-Type: text/plain; charset="utf-8" Content-Transfer-Encoding: quoted-printable MIME-Version: 1.0 https://github.com/python/cpython/commit/c73914a562580ae72048876cb42ed8e76e2c= 83f9 commit: c73914a562580ae72048876cb42ed8e76e2c83f9 branch: master author: R=C3=A9mi Lapeyre committer: GitHub date: 2020-05-24T22:12:57+01:00 summary: bpo-36290: Fix keytword collision handling in AST node constructors (GH-12382) files: A Misc/NEWS.d/next/Library/2019-03-17-19-01-53.bpo-36290.7VXo_K.rst M Lib/ast.py M Lib/test/test_ast.py M Parser/asdl_c.py M Python/Python-ast.c diff --git a/Lib/ast.py b/Lib/ast.py index 52e51b4858774..6a5b39e270b9b 100644 --- a/Lib/ast.py +++ b/Lib/ast.py @@ -524,6 +524,13 @@ def __instancecheck__(cls, inst): return type.__instancecheck__(cls, inst) =20 def _new(cls, *args, **kwargs): + for key in kwargs: + if key not in cls._fields: + # arbitrary keyword arguments are accepted + continue + pos =3D cls._fields.index(key) + if pos < len(args): + raise TypeError(f"{cls.__name__} got multiple values for argumen= t {key!r}") if cls in _const_types: return Constant(*args, **kwargs) return Constant.__new__(cls, *args, **kwargs) diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py index e55d10badc37e..3e9c8b55cdff4 100644 --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast.py @@ -402,6 +402,15 @@ def test_classattrs(self): self.assertRaises(TypeError, ast.Num, 1, None, 2) self.assertRaises(TypeError, ast.Num, 1, None, 2, lineno=3D0) =20 + # Arbitrary keyword arguments are supported + self.assertEqual(ast.Constant(1, foo=3D'bar').foo, 'bar') + self.assertEqual(ast.Num(1, foo=3D'bar').foo, 'bar') + + with self.assertRaisesRegex(TypeError, "Num got multiple values for = argument 'n'"): + ast.Num(1, n=3D2) + with self.assertRaisesRegex(TypeError, "Constant got multiple values= for argument 'value'"): + ast.Constant(1, value=3D2) + self.assertEqual(ast.Num(42).n, 42) 
self.assertEqual(ast.Num(4.25).n, 4.25) self.assertEqual(ast.Num(4.25j).n, 4.25j) diff --git a/Misc/NEWS.d/next/Library/2019-03-17-19-01-53.bpo-36290.7VXo_K.rs= t b/Misc/NEWS.d/next/Library/2019-03-17-19-01-53.bpo-36290.7VXo_K.rst new file mode 100644 index 0000000000000..a9afe62b0c46e --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-03-17-19-01-53.bpo-36290.7VXo_K.rst @@ -0,0 +1,2 @@ +AST nodes are now raising :exc:`TypeError` on conflicting keyword arguments. +Patch contributed by R=C3=A9mi Lapeyre. diff --git a/Parser/asdl_c.py b/Parser/asdl_c.py index 6d572755e68e8..f8729cd170b10 100755 --- a/Parser/asdl_c.py +++ b/Parser/asdl_c.py @@ -695,8 +695,9 @@ def visitModule(self, mod): } if (fields) { numfields =3D PySequence_Size(fields); - if (numfields =3D=3D -1) + if (numfields =3D=3D -1) { goto cleanup; + } } =20 res =3D 0; /* if no error occurs, this stays 0 to the end */ @@ -717,15 +718,35 @@ def visitModule(self, mod): } res =3D PyObject_SetAttr(self, name, PyTuple_GET_ITEM(args, i)); Py_DECREF(name); - if (res < 0) + if (res < 0) { goto cleanup; + } } if (kw) { i =3D 0; /* needed by PyDict_Next */ while (PyDict_Next(kw, &i, &key, &value)) { + int contains =3D PySequence_Contains(fields, key); + if (contains =3D=3D -1) { + res =3D -1; + goto cleanup; + } else if (contains =3D=3D 1) { + Py_ssize_t p =3D PySequence_Index(fields, key); + if (p =3D=3D -1) { + res =3D -1; + goto cleanup; + } + if (p < PyTuple_GET_SIZE(args)) { + PyErr_Format(PyExc_TypeError, + "%.400s got multiple values for argument '%U'", + Py_TYPE(self)->tp_name, key); + res =3D -1; + goto cleanup; + } + } res =3D PyObject_SetAttr(self, key, value); - if (res < 0) + if (res < 0) { goto cleanup; + } } } cleanup: diff --git a/Python/Python-ast.c b/Python/Python-ast.c index f34b1450c66ef..d2edf74c81216 100644 --- a/Python/Python-ast.c +++ b/Python/Python-ast.c @@ -1131,8 +1131,9 @@ ast_type_init(PyObject *self, PyObject *args, PyObject = *kw) } if (fields) { numfields =3D PySequence_Size(fields); 
- if (numfields =3D=3D -1) + if (numfields =3D=3D -1) { goto cleanup; + } } =20 res =3D 0; /* if no error occurs, this stays 0 to the end */ @@ -1153,15 +1154,35 @@ ast_type_init(PyObject *self, PyObject *args, PyObjec= t *kw) } res =3D PyObject_SetAttr(self, name, PyTuple_GET_ITEM(args, i)); Py_DECREF(name); - if (res < 0) + if (res < 0) { goto cleanup; + } } if (kw) { i =3D 0; /* needed by PyDict_Next */ while (PyDict_Next(kw, &i, &key, &value)) { + int contains =3D PySequence_Contains(fields, key); + if (contains =3D=3D -1) { + res =3D -1; + goto cleanup; + } else if (contains =3D=3D 1) { + Py_ssize_t p =3D PySequence_Index(fields, key); + if (p =3D=3D -1) { + res =3D -1; + goto cleanup; + } + if (p < PyTuple_GET_SIZE(args)) { + PyErr_Format(PyExc_TypeError, + "%.400s got multiple values for argument '%U'", + Py_TYPE(self)->tp_name, key); + res =3D -1; + goto cleanup; + } + } res =3D PyObject_SetAttr(self, key, value); - if (res < 0) + if (res < 0) { goto cleanup; + } } } cleanup: From webhook-mailer at python.org Mon May 25 10:54:21 2020 From: webhook-mailer at python.org (Miro =?utf-8?q?Hron=C4=8Dok?=) Date: Mon, 25 May 2020 14:54:21 -0000 Subject: [Python-checkins] (no subject) Message-ID: To: python-checkins at python.org Subject: bpo-38972: Fix typos in PowerShell Execution Policies links (GH-20383) Content-Type: text/plain; charset="utf-8" Content-Transfer-Encoding: quoted-printable MIME-Version: 1.0 https://github.com/python/cpython/commit/ef16958d17e83723334a51428f410f726d64= 92a7 commit: ef16958d17e83723334a51428f410f726d6492a7 branch: master author: Miro Hron=C4=8Dok committer: GitHub date: 2020-05-25T16:54:14+02:00 summary: bpo-38972: Fix typos in PowerShell Execution Policies links (GH-20383) files: M Doc/using/venv-create.inc M Lib/venv/scripts/common/Activate.ps1 diff --git a/Doc/using/venv-create.inc b/Doc/using/venv-create.inc index c39048d618812..c8f6e8f87d567 100644 --- a/Doc/using/venv-create.inc +++ b/Doc/using/venv-create.inc @@ -91,7 +91,7 
@@ The command, if run with ``-h``, will show the available op= tions:: PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentU= ser =20 See `About Execution Policies - `_ + `_ for more information. =20 The created ``pyvenv.cfg`` file also includes the diff --git a/Lib/venv/scripts/common/Activate.ps1 b/Lib/venv/scripts/common/A= ctivate.ps1 index b8245b1bbe5c8..a3bc6fb1f05bf 100644 --- a/Lib/venv/scripts/common/Activate.ps1 +++ b/Lib/venv/scripts/common/Activate.ps1 @@ -45,7 +45,7 @@ command: PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser =20 For more information on Execution Policies:=20 -ttps:/go.microsoft.com/fwlink/?LinkID=3D135170 +https://go.microsoft.com/fwlink/?LinkID=3D135170 =20 #> Param( From webhook-mailer at python.org Mon May 25 11:42:56 2020 From: webhook-mailer at python.org (Miro =?utf-8?q?Hron=C4=8Dok?=) Date: Mon, 25 May 2020 15:42:56 -0000 Subject: [Python-checkins] (no subject) Message-ID: To: python-checkins at python.org Subject: bpo-39245: Fix docs links to the stable ABI (GH-20388) Content-Type: text/plain; charset="utf-8" Content-Transfer-Encoding: quoted-printable MIME-Version: 1.0 https://github.com/python/cpython/commit/e50883ccc4bfa198c3d5e3367306324fc497= 30cb commit: e50883ccc4bfa198c3d5e3367306324fc49730cb branch: master author: Miro Hron=C4=8Dok committer: GitHub date: 2020-05-25T08:42:48-07:00 summary: bpo-39245: Fix docs links to the stable ABI (GH-20388) Automerge-Triggered-By: @vstinner files: M Doc/c-api/call.rst diff --git a/Doc/c-api/call.rst b/Doc/c-api/call.rst index 06db12666d787..0832e7e219360 100644 --- a/Doc/c-api/call.rst +++ b/Doc/c-api/call.rst @@ -144,7 +144,7 @@ Vectorcall Support API However, the function ``PyVectorcall_NARGS`` should be used to allow for future extensions. =20 - This function is not part of the `limited API `_. + This function is not part of the :ref:`limited API `. =20 .. 
versionadded:: 3.8 =20 @@ -158,7 +158,7 @@ Vectorcall Support API This is mostly useful to check whether or not *op* supports vectorcall, which can be done by checking ``PyVectorcall_Function(op) !=3D NULL``. =20 - This function is not part of the `limited API `_. + This function is not part of the :ref:`limited API `. =20 .. versionadded:: 3.8 =20 @@ -172,7 +172,7 @@ Vectorcall Support API It does not check the :const:`Py_TPFLAGS_HAVE_VECTORCALL` flag and it does not fall back to ``tp_call``. =20 - This function is not part of the `limited API `_. + This function is not part of the :ref:`limited API `. =20 .. versionadded:: 3.8 =20 @@ -256,7 +256,7 @@ please see individual documentation for details. Return the result of the call on success, or raise an exception and return *NULL* on failure. =20 - This function is not part of the `limited API `_. + This function is not part of the :ref:`limited API `. =20 .. versionadded:: 3.9 =20 @@ -343,7 +343,7 @@ please see individual documentation for details. Return the result of the call on success, or raise an exception and return *NULL* on failure. =20 - This function is not part of the `limited API `_. + This function is not part of the :ref:`limited API `. =20 .. versionadded:: 3.9 =20 @@ -357,7 +357,7 @@ please see individual documentation for details. Return the result of the call on success, or raise an exception and return *NULL* on failure. =20 - This function is not part of the `limited API `_. + This function is not part of the :ref:`limited API `. =20 .. versionadded:: 3.9 =20 @@ -372,7 +372,7 @@ please see individual documentation for details. Return the result of the call on success, or raise an exception and return *NULL* on failure. =20 - This function is not part of the `limited API `_. + This function is not part of the :ref:`limited API `. =20 .. versionadded:: 3.9 =20 @@ -388,7 +388,7 @@ please see individual documentation for details. 
already has a dictionary ready to use for the keyword arguments, but not a tuple for the positional arguments. =20 - This function is not part of the `limited API `_. + This function is not part of the :ref:`limited API `. =20 .. versionadded:: 3.9 =20 @@ -410,7 +410,7 @@ please see individual documentation for details. Return the result of the call on success, or raise an exception and return *NULL* on failure. =20 - This function is not part of the `limited API `_. + This function is not part of the :ref:`limited API `. =20 .. versionadded:: 3.9 =20 From webhook-mailer at python.org Wed May 27 06:07:20 2020 From: webhook-mailer at python.org (Oleg =?utf-8?q?H=C3=B6fling?=) Date: Wed, 27 May 2020 10:07:20 -0000 Subject: [Python-checkins] (no subject) Message-ID: To: python-checkins at python.org Subject: Fix the link to ncurses patch download in macos installer build script (GH-20421) Content-Type: text/plain; charset="utf-8" Content-Transfer-Encoding: quoted-printable MIME-Version: 1.0 https://github.com/python/cpython/commit/7da46b676aed7111de34b57c8b942a7f3bb8= 0327 commit: 7da46b676aed7111de34b57c8b942a7f3bb80327 branch: master author: Oleg H=C3=B6fling committer: GitHub date: 2020-05-27T03:07:15-07:00 summary: Fix the link to ncurses patch download in macos installer build script (GH-20= 421) Reason: the link `ftp://invisible-island.net/ncurses//5.9/ncurses-5.9-2012061= 6-patch.sh.bz2` is dead, which prevents `Mac/BuildScript/build-installer.py` = from completing. Looks like the host of the FTP server was changed to `ftp.in= visible-island.net`, thus this proposal. 
Signed-off-by: oleg.hoefling files: M Mac/BuildScript/build-installer.py diff --git a/Mac/BuildScript/build-installer.py b/Mac/BuildScript/build-insta= ller.py index a10601bed9f8c..76553c93a4957 100755 --- a/Mac/BuildScript/build-installer.py +++ b/Mac/BuildScript/build-installer.py @@ -302,7 +302,7 @@ def library_recipes(): "--libdir=3D/Library/Frameworks/Python.framework/Versions/= %s/lib"%(getVersion(),), ], patchscripts=3D[ - ("ftp://invisible-island.net/ncurses//5.9/ncurses-5.9-2012= 0616-patch.sh.bz2", + ("ftp://ftp.invisible-island.net/ncurses//5.9/ncurses-5.9-= 20120616-patch.sh.bz2", "f54bf02a349f96a7c4f0d00922f3a0d4"), ], useLDFlags=3DFalse,

{% trans %}Download these documents{% endtrans %}